mirror of https://github.com/gocsaf/csaf.git (synced 2025-12-22 05:40:11 +01:00)

Resolved merge conflict in csaf_provider.md

commit 05915526c2

8 changed files with 141 additions and 26 deletions
@@ -26,6 +26,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/ProtonMail/gopenpgp/v2/armor"
+	"github.com/ProtonMail/gopenpgp/v2/constants"
 	"github.com/ProtonMail/gopenpgp/v2/crypto"
 	"github.com/csaf-poc/csaf_distribution/csaf"
 	"github.com/csaf-poc/csaf_distribution/util"
@@ -490,7 +492,8 @@ func (w *worker) sign(data []byte) (string, error) {
 	if err != nil {
 		return "", err
 	}
-	return sig.GetArmored()
+	return armor.ArmorWithTypeAndCustomHeaders(
+		sig.Data, constants.PGPSignatureHeader, "", "")
 }
 
 func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
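Reviewer note: this hunk (and the matching ones in `handleSignature` and `uploadRequest` below) replaces `sig.GetArmored()` with `armor.ArmorWithTypeAndCustomHeaders`, which armors the raw signature bytes (`sig.Data`) as a `PGP SIGNATURE` block with empty version and comment headers instead of gopenpgp's defaults. A minimal, self-contained sketch of the same pattern; the ephemeral key and the sample payload are made up for illustration, the real tools load their signing keys from configuration:

```go
package main

import (
	"fmt"
	"log"

	"github.com/ProtonMail/gopenpgp/v2/armor"
	"github.com/ProtonMail/gopenpgp/v2/constants"
	"github.com/ProtonMail/gopenpgp/v2/crypto"
)

// signDetached armors a detached signature without the default
// "Version"/"Comment" armor headers that sig.GetArmored() would add.
func signDetached(kr *crypto.KeyRing, data []byte) (string, error) {
	sig, err := kr.SignDetached(crypto.NewPlainMessage(data))
	if err != nil {
		return "", err
	}
	return armor.ArmorWithTypeAndCustomHeaders(
		sig.Data, constants.PGPSignatureHeader, "", "")
}

func main() {
	// Ephemeral key for illustration only.
	key, err := crypto.GenerateKey("example", "example@example.com", "x25519", 0)
	if err != nil {
		log.Fatal(err)
	}
	kr, err := crypto.NewKeyRing(key)
	if err != nil {
		log.Fatal(err)
	}
	asc, err := signDetached(kr, []byte(`{"document":{}}`))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(asc)
}
```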
@@ -329,8 +329,44 @@ func (p *processor) httpClient() util.Client {
 
 var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`)
 
+// checkFile constructs the URLs of a remote file.
+type checkFile interface {
+	url() string
+	sha256() string
+	sha512() string
+	sign() string
+}
+
+// stringFile is a simple implementation of checkFile.
+// The hash and signature files are directly constructed by extending
+// the file name.
+type stringFile string
+
+func (sf stringFile) url() string    { return string(sf) }
+func (sf stringFile) sha256() string { return string(sf) + ".sha256" }
+func (sf stringFile) sha512() string { return string(sf) + ".sha512" }
+func (sf stringFile) sign() string   { return string(sf) + ".asc" }
+
+// hashFile is a more involved version of checkFile.
+// Here each component can be given explicitly.
+// If a component is not given it is constructed by
+// extending the first component.
+type hashFile [4]string
+
+func (hf hashFile) name(i int, ext string) string {
+	if hf[i] != "" {
+		return hf[i]
+	}
+	return hf[0] + ext
+}
+
+func (hf hashFile) url() string    { return hf[0] }
+func (hf hashFile) sha256() string { return hf.name(1, ".sha256") }
+func (hf hashFile) sha512() string { return hf.name(2, ".sha512") }
+func (hf hashFile) sign() string   { return hf.name(3, ".asc") }
+
 func (p *processor) integrity(
-	files []string,
+	files []checkFile,
 	base string,
 	mask whereType,
 	lg func(MessageType, string, ...interface{}),
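Reviewer note: `checkFile` abstracts where the hash and signature URLs of an advisory come from. `stringFile` keeps the old behaviour (append `.sha256`/`.sha512`/`.asc` to the document URL), while `hashFile` lets a ROLIE feed supply them explicitly and only falls back to suffixing for missing slots. A standalone sketch, with the unexported types copied locally and made-up file names:

```go
package main

import "fmt"

// Local copies of the unexported checker types, for illustration only.
type checkFile interface {
	url() string
	sha256() string
	sha512() string
	sign() string
}

type stringFile string

func (sf stringFile) url() string    { return string(sf) }
func (sf stringFile) sha256() string { return string(sf) + ".sha256" }
func (sf stringFile) sha512() string { return string(sf) + ".sha512" }
func (sf stringFile) sign() string   { return string(sf) + ".asc" }

// hashFile holds [document, sha256, sha512, signature]; empty slots
// fall back to suffixing the document URL.
type hashFile [4]string

func (hf hashFile) name(i int, ext string) string {
	if hf[i] != "" {
		return hf[i]
	}
	return hf[0] + ext
}

func (hf hashFile) url() string    { return hf[0] }
func (hf hashFile) sha256() string { return hf.name(1, ".sha256") }
func (hf hashFile) sha512() string { return hf.name(2, ".sha512") }
func (hf hashFile) sign() string   { return hf.name(3, ".asc") }

func main() {
	files := []checkFile{
		// index.txt / changes.csv entry: everything derived by suffix.
		stringFile("2022/example-advisory.json"),
		// ROLIE entry with an explicit SHA-512 link; SHA-256 and the
		// signature fall back to the suffix convention.
		hashFile{"2022/example-advisory.json", "", "hashes/example.sha512", ""},
	}
	for _, f := range files {
		fmt.Println(f.url(), f.sha256(), f.sha512(), f.sign())
	}
}
```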
@@ -344,7 +380,7 @@ func (p *processor) integrity(
 	var data bytes.Buffer
 
 	for _, f := range files {
-		fp, err := url.Parse(f)
+		fp, err := url.Parse(f.url())
 		if err != nil {
 			lg(ErrorType, "Bad URL %s: %v", f, err)
 			continue
@@ -413,12 +449,18 @@ func (p *processor) integrity(
 
 		for _, x := range []struct {
 			ext  string
+			url  func() string
 			hash []byte
 		}{
-			{"sha256", s256.Sum(nil)},
-			{"sha512", s512.Sum(nil)},
+			{"SHA256", f.sha256, s256.Sum(nil)},
+			{"SHA512", f.sha512, s512.Sum(nil)},
 		} {
-			hashFile := u + "." + x.ext
+			hu, err := url.Parse(x.url())
+			if err != nil {
+				lg(ErrorType, "Bad URL %s: %v", x.url(), err)
+				continue
+			}
+			hashFile := b.ResolveReference(hu).String()
 			p.checkTLS(hashFile)
 			if res, err = client.Get(hashFile); err != nil {
 				p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err)
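Reviewer note: the locally computed digests (`s256.Sum(nil)`, `s512.Sum(nil)`) are still compared against the published hash files; only the way the hash file URL is derived changed, and the label is now stored pre-uppercased (`SHA256`/`SHA512`) instead of calling `strings.ToUpper`. A self-contained sketch of the comparison itself, with inline data instead of HTTP fetches and a made-up advisory name:

```go
package main

import (
	"bytes"
	"crypto/sha256"
	"crypto/sha512"
	"encoding/hex"
	"fmt"
	"strings"
)

func main() {
	doc := []byte(`{"document":{"title":"example"}}`)

	// What the checker computes while reading the advisory.
	s256 := sha256.New()
	s512 := sha512.New()
	s256.Write(doc)
	s512.Write(doc)

	// What a published .sha256 file typically contains: "<hex digest>  <name>".
	published := hex.EncodeToString(s256.Sum(nil)) + "  example-advisory.json\n"

	// Parse the remote hash file and compare, as integrity() does.
	remote, err := hex.DecodeString(strings.Fields(published)[0])
	if err != nil {
		fmt.Println("bad hash file:", err)
		return
	}
	if bytes.Equal(remote, s256.Sum(nil)) {
		fmt.Println("SHA256 hash matches")
	} else {
		fmt.Println("SHA256 hash of example-advisory.json does not match")
	}
	_ = s512 // SHA-512 is checked the same way against the .sha512 file.
}
```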
@@ -443,12 +485,17 @@ func (p *processor) integrity(
 			}
 			if !bytes.Equal(h, x.hash) {
 				p.badIntegrities.error("%s hash of %s does not match %s.",
-					strings.ToUpper(x.ext), u, hashFile)
+					x.ext, u, hashFile)
 			}
 		}
 
 		// Check signature
-		sigFile := u + ".asc"
+		su, err := url.Parse(f.sign())
+		if err != nil {
+			lg(ErrorType, "Bad URL %s: %v", f.sign(), err)
+			continue
+		}
+		sigFile := b.ResolveReference(su).String()
 		p.checkTLS(sigFile)
 
 		p.badSignatures.use()
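Reviewer note: hash and signature locations are no longer built by naive string concatenation (`u + "." + x.ext`, `u + ".asc"`); they come from `checkFile` and are resolved against the base URL, so relative links from a ROLIE feed work too. A small sketch of the `ResolveReference` pattern used above, with made-up base and link values:

```go
package main

import (
	"fmt"
	"log"
	"net/url"
)

func main() {
	// Hypothetical base: the directory of the advisory being checked.
	b, err := url.Parse("https://example.com/.well-known/csaf/white/2022/")
	if err != nil {
		log.Fatal(err)
	}
	// Candidates as a feed might deliver them: relative path, absolute
	// path, or a full URL on another host.
	for _, ref := range []string{
		"example-advisory.json.sha256",
		"/hashes/example-advisory.json.sha512",
		"https://cdn.example.com/sigs/example-advisory.json.asc",
	} {
		u, err := url.Parse(ref)
		if err != nil {
			log.Printf("Bad URL %s: %v", ref, err)
			continue
		}
		// Same call the checker now uses to build the fetch URL.
		fmt.Println(b.ResolveReference(u).String())
	}
}
```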
@@ -545,7 +592,60 @@ func (p *processor) processROLIEFeed(feed string) error {
 	}
 
 	// Extract the CSAF files from feed.
-	files := rfeed.Files("self")
+	var files []checkFile
+
+	rfeed.Entries(func(entry *csaf.Entry) {
+
+		var url, sha256, sha512, sign string
+
+		for i := range entry.Link {
+			link := &entry.Link[i]
+			lower := strings.ToLower(link.HRef)
+			switch link.Rel {
+			case "self":
+				if !strings.HasSuffix(lower, ".json") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "self" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				url = link.HRef
+			case "signature":
+				if !strings.HasSuffix(lower, ".asc") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "signature" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				sign = link.HRef
+			case "hash":
+				switch {
+				case strings.HasSuffix(lower, "sha256"):
+					sha256 = link.HRef
+				case strings.HasSuffix(lower, "sha512"):
+					sha512 = link.HRef
+				default:
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "hash" has unsupported file extension.`,
+						link.HRef, feed)
+				}
+			}
+		}
+
+		if url == "" {
+			p.badProviderMetadata.warn(
+				`ROLIE feed %s contains entry link with no "self" URL.`, feed)
+			return
+		}
+
+		var file checkFile
+
+		if sha256 != "" || sha512 != "" || sign != "" {
+			file = hashFile{url, sha256, sha512, sign}
+		} else {
+			file = stringFile(url)
+		}
+
+		files = append(files, file)
+	})
 
 	if err := p.integrity(files, base, rolieMask, p.badProviderMetadata.add); err != nil &&
 		err != errContinue {
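Reviewer note: instead of `rfeed.Files("self")`, which only collected the document URLs, the checker now walks every feed entry and also picks up its `hash` and `signature` links, warning about unexpected file extensions. A stripped-down sketch of that classification; the entry data is made up and a local `link` type stands in for `csaf.Link`:

```go
package main

import (
	"fmt"
	"strings"
)

// link mirrors only the two fields the classification relies on.
type link struct {
	Rel  string
	HRef string
}

func main() {
	entry := []link{
		{"self", "2022/example-advisory.json"},
		{"hash", "2022/example-advisory.json.sha512"},
		{"signature", "2022/example-advisory.json.asc"},
	}

	var url, sha256, sha512, sign string
	for _, l := range entry {
		lower := strings.ToLower(l.HRef)
		switch l.Rel {
		case "self":
			url = l.HRef
		case "signature":
			sign = l.HRef
		case "hash":
			switch {
			case strings.HasSuffix(lower, "sha256"):
				sha256 = l.HRef
			case strings.HasSuffix(lower, "sha512"):
				sha512 = l.HRef
			}
		}
	}

	// Entries with explicit links become a hashFile, plain ones a stringFile.
	if sha256 != "" || sha512 != "" || sign != "" {
		fmt.Printf("hashFile{%q, %q, %q, %q}\n", url, sha256, sha512, sign)
	} else {
		fmt.Printf("stringFile(%q)\n", url)
	}
}
```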
@@ -588,12 +688,12 @@ func (p *processor) checkIndex(base string, mask whereType) error {
 		return errContinue
 	}
 
-	files, err := func() ([]string, error) {
+	files, err := func() ([]checkFile, error) {
 		defer res.Body.Close()
-		var files []string
+		var files []checkFile
 		scanner := bufio.NewScanner(res.Body)
 		for scanner.Scan() {
-			files = append(files, scanner.Text())
+			files = append(files, stringFile(scanner.Text()))
 		}
 		return files, scanner.Err()
 	}()
@@ -630,10 +730,10 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 		return errContinue
 	}
 
-	times, files, err := func() ([]time.Time, []string, error) {
+	times, files, err := func() ([]time.Time, []checkFile, error) {
 		defer res.Body.Close()
 		var times []time.Time
-		var files []string
+		var files []checkFile
 		c := csv.NewReader(res.Body)
 		for {
 			r, err := c.Read()
@@ -650,7 +750,7 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 			if err != nil {
 				return nil, nil, err
 			}
-			times, files = append(times, t), append(files, r[1])
+			times, files = append(times, t), append(files, stringFile(r[1]))
 		}
 		return times, files, nil
 	}()
@@ -21,6 +21,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/ProtonMail/gopenpgp/v2/armor"
+	"github.com/ProtonMail/gopenpgp/v2/constants"
 	"github.com/ProtonMail/gopenpgp/v2/crypto"
 	"github.com/csaf-poc/csaf_distribution/csaf"
 	"github.com/csaf-poc/csaf_distribution/util"
@@ -112,7 +114,8 @@ func (c *controller) handleSignature(
 		return "", nil, err
 	}
 
-	armored, err := sig.GetArmored()
+	armored, err := armor.ArmorWithTypeAndCustomHeaders(
+		sig.Data, constants.PGPSignatureHeader, "", "")
 	return armored, key, err
 }
@@ -21,6 +21,8 @@ import (
 	"os"
 	"path/filepath"
 
+	"github.com/ProtonMail/gopenpgp/v2/armor"
+	"github.com/ProtonMail/gopenpgp/v2/constants"
 	"github.com/ProtonMail/gopenpgp/v2/crypto"
 	"github.com/csaf-poc/csaf_distribution/csaf"
 	"github.com/csaf-poc/csaf_distribution/util"
@@ -239,7 +241,8 @@ func (p *processor) uploadRequest(filename string) (*http.Request, error) {
 	if err != nil {
 		return nil, err
 	}
-	armored, err := sig.GetArmored()
+	armored, err := armor.ArmorWithTypeAndCustomHeaders(
+		sig.Data, constants.PGPSignatureHeader, "", "")
 	if err != nil {
 		return nil, err
 	}
@@ -116,6 +116,13 @@ func (rf *ROLIEFeed) Files(filter string) []string {
 	return files
 }
 
+// Entries visits the entries of this feed.
+func (rf *ROLIEFeed) Entries(fn func(*Entry)) {
+	for _, e := range rf.Feed.Entry {
+		fn(e)
+	}
+}
+
 // SortEntriesByUpdated sorts all the entries in the feed
 // by their update times.
 func (rf *ROLIEFeed) SortEntriesByUpdated() {
@@ -6,7 +6,7 @@ a web browser.
 ### Configure nginx
 Assuming the relevant server block is in `/etc/nginx/sites-enabled/default` and the CA used to verify the client certificates is under `/etc/ssl/`,
 adjust the content of the `server{}` block like shown in the following example:
-<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/TLSClientConfigsForITest.sh&lines=25-40) -->
+<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/TLSClientConfigsForITest.sh&lines=25-38) -->
 <!-- The below code snippet is automatically added from ../docs/scripts/TLSClientConfigsForITest.sh -->
 ```sh
 ssl_client_certificate '${SSL_CLIENT_CERTIFICATE}'; # e.g. ssl_client_certificate /etc/ssl/rootca-cert.pem;
@@ -20,9 +20,7 @@ adjust the content of the `server{}` block like shown in the following example:
         autoindex on;
         # in this location access is only allowed with client certs
         if ($ssl_client_verify != SUCCESS){
-            # we use status code 404 == "Not Found", because we do
-            # not want to reveal if files within this location exist or not.
-            return 404;
+            return 403;
         }
     }
 ```
@@ -1,5 +1,8 @@
-`csaf_provider` implements the CGI interface for webservers and reads its configuration from a [TOML](https://toml.io/en/) file.
-The [setup docs](../README.md#setup-trusted-provider) explain how to wire this up with nginx and where the config file lives.
+`csaf_provider` implements the CGI interface for webservers
+and reads its configuration from a [TOML](https://toml.io/en/) file.
+
+The [setup docs](../README.md#setup-trusted-provider)
+explain how to wire this up with nginx and where the config file lives.
 
 ## Provider options
 
@@ -33,9 +33,7 @@ echo '
         autoindex on;
         # in this location access is only allowed with client certs
        if ($ssl_client_verify != SUCCESS){
-            # we use status code 404 == "Not Found", because we do
-            # not want to reveal if files within this location exist or not.
-            return 404;
+            return 403;
         }
     }
 '> ~/${FOLDERNAME}/clientCertificateConfigs.txt