1
0
Fork 0
mirror of https://github.com/gocsaf/csaf.git synced 2025-12-22 11:55:40 +01:00

Add CSAF downloader

* Condense and refactor the ROLIE code in the aggregator a bit.
* Move advisory file processor to the csaf package.
* Fix minor typo on main readme
This commit is contained in:
Sascha L. Teichmann 2022-06-23 14:14:44 +02:00 committed by GitHub
parent 640ef64df9
commit b359fd0a62
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 929 additions and 239 deletions

View file

@ -26,6 +26,7 @@ jobs:
./TLSClientConfigsForITest.sh ./TLSClientConfigsForITest.sh
./setupProviderForITest.sh ./setupProviderForITest.sh
./testAggregator.sh ./testAggregator.sh
./testDownloader.sh
shell: bash shell: bash
- name: Upload test results - name: Upload test results

View file

@ -76,9 +76,9 @@ dist: build_linux build_win
mkdir -p dist mkdir -p dist
mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64
cp README.md dist/$(DISTDIR)-windows-amd64 cp README.md dist/$(DISTDIR)-windows-amd64
cp bin-windows-amd64/csaf_uploader.exe bin-windows-amd64/csaf_checker.exe dist/$(DISTDIR)-windows-amd64/bin-windows-amd64/ cp bin-windows-amd64/csaf_uploader.exe bin-windows-amd64/csaf_checker.exe bin-windows-amd64/csaf_downloader.exe dist/$(DISTDIR)-windows-amd64/bin-windows-amd64/
mkdir -p dist/$(DISTDIR)-windows-amd64/docs mkdir -p dist/$(DISTDIR)-windows-amd64/docs
cp docs/csaf_uploader.md docs/csaf_checker.md dist/$(DISTDIR)-windows-amd64/docs cp docs/csaf_uploader.md docs/csaf_checker.md docs/csaf_downloader.md dist/$(DISTDIR)-windows-amd64/docs
mkdir dist/$(DISTDIR)-gnulinux-amd64 mkdir dist/$(DISTDIR)-gnulinux-amd64
cp -r README.md docs bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 cp -r README.md docs bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64
cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/ cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/

View file

@ -1,8 +1,8 @@
# csaf_distribution # csaf_distribution
An implementation of a [CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csd02/csaf-v2.0-csd02.html) trusted provider, checker and aggregator. Includes an uploader command line tool for the trusted provider. An implementation of a [CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csd02/csaf-v2.0-csd02.html) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider.
Status: Beta (ready for more testing, but known short comings, see issues) Status: Beta (ready for more testing, but known shortcomings see issues)
## [csaf_provider](docs/csaf_provider.md) ## [csaf_provider](docs/csaf_provider.md)
@ -18,6 +18,9 @@ is an implementation of the role CSAF Aggregator.
## [csaf_checker](docs/csaf_checker.md) ## [csaf_checker](docs/csaf_checker.md)
is a tool for testing a CSAF Trusted Provider according to [Section 7 of the CSAF standard](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#7-distributing-csaf-documents). is a tool for testing a CSAF Trusted Provider according to [Section 7 of the CSAF standard](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#7-distributing-csaf-documents).
## [csaf_downloader](docs/csaf_downloader.md)
is a tool for downloading advisories from a provider.
## Setup ## Setup
Note that the server side is only tested Note that the server side is only tested
and the binaries available for GNU/Linux-Systems, e.g. Ubuntu LTS. and the binaries available for GNU/Linux-Systems, e.g. Ubuntu LTS.

View file

@ -233,26 +233,3 @@ func (w *worker) writeIndices() error {
return nil return nil
} }
// loadIndex loads baseURL/index.txt and returns a list of files
// prefixed by baseURL/.
func (w *worker) loadIndex(baseURL string) ([]string, error) {
indexURL := baseURL + "/index.txt"
resp, err := w.client.Get(indexURL)
if err != nil {
return nil, err
}
defer resp.Body.Close()
var lines []string
scanner := bufio.NewScanner(resp.Body)
for scanner.Scan() {
lines = append(lines, baseURL+"/"+scanner.Text())
}
if err := scanner.Err(); err != nil {
return nil, err
}
return lines, nil
}

View file

@ -26,7 +26,7 @@ type options struct {
func errCheck(err error) { func errCheck(err error) {
if err != nil { if err != nil {
if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp { if flags.WroteHelp(err) {
os.Exit(0) os.Exit(0)
} }
log.Fatalf("error: %v\n", err) log.Fatalf("error: %v\n", err)

View file

@ -29,76 +29,11 @@ import (
"github.com/ProtonMail/gopenpgp/v2/armor" "github.com/ProtonMail/gopenpgp/v2/armor"
"github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/constants"
"github.com/ProtonMail/gopenpgp/v2/crypto" "github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/csaf-poc/csaf_distribution/csaf" "github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util" "github.com/csaf-poc/csaf_distribution/util"
) )
func (w *worker) handleROLIE(
rolie interface{},
process func(*csaf.TLPLabel, []string) error,
) error {
base, err := url.Parse(w.loc)
if err != nil {
return err
}
var feeds [][]csaf.Feed
if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
return err
}
log.Printf("Found %d ROLIE feed(s).\n", len(feeds))
for _, fs := range feeds {
for i := range fs {
feed := &fs[i]
if feed.URL == nil {
continue
}
up, err := url.Parse(string(*feed.URL))
if err != nil {
log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err)
continue
}
feedURL := base.ResolveReference(up).String()
log.Printf("Feed URL: %s\n", feedURL)
fb, err := util.BaseURL(feedURL)
if err != nil {
log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err)
continue
}
feedBaseURL, err := url.Parse(fb)
if err != nil {
log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err)
continue
}
res, err := w.client.Get(feedURL)
if err != nil {
log.Printf("error: Cannot get feed '%s'\n", err)
continue
}
if res.StatusCode != http.StatusOK {
log.Printf("error: Fetching %s failed. Status code %d (%s)",
feedURL, res.StatusCode, res.Status)
continue
}
rfeed, err := func() (*csaf.ROLIEFeed, error) {
defer res.Body.Close()
return csaf.LoadROLIEFeed(res.Body)
}()
if err != nil {
log.Printf("Loading ROLIE feed failed: %v.", err)
continue
}
files := resolveURLs(rfeed.Files("self"), feedBaseURL)
if err := process(feed.TLPLabel, files); err != nil {
return err
}
}
}
return nil
}
// mirrorAllowed checks if mirroring is allowed. // mirrorAllowed checks if mirroring is allowed.
func (w *worker) mirrorAllowed() bool { func (w *worker) mirrorAllowed() bool {
var b bool var b bool
@ -129,38 +64,20 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {
// Collecting the summaries of the advisories. // Collecting the summaries of the advisories.
w.summaries = make(map[string][]summary) w.summaries = make(map[string][]summary)
// Check if we have ROLIE feeds. base, err := url.Parse(w.loc)
rolie, err := w.expr.Eval(
"$.distributions[*].rolie.feeds", w.metadataProvider)
if err != nil { if err != nil {
log.Printf("rolie check failed: %v\n", err)
return nil, err return nil, err
} }
fs, hasRolie := rolie.([]interface{}) afp := csaf.NewAdvisoryFileProcessor(
hasRolie = hasRolie && len(fs) > 0 w.client,
w.expr,
w.metadataProvider,
base)
if hasRolie { if err := afp.Process(w.mirrorFiles); err != nil {
if err := w.handleROLIE(rolie, w.mirrorFiles); err != nil {
return nil, err return nil, err
} }
} else {
// No rolie feeds -> try to load files from index.txt
baseURL, err := util.BaseURL(w.loc)
if err != nil {
return nil, err
}
files, err := w.loadIndex(baseURL)
if err != nil {
return nil, err
}
_ = files
// XXX: Is treating as white okay? better look into the advisories?
white := csaf.TLPLabel(csaf.TLPLabelWhite)
if err := w.mirrorFiles(&white, files); err != nil {
return nil, err
}
} // TODO: else scan directories?
if err := w.writeIndices(); err != nil { if err := w.writeIndices(); err != nil {
return nil, err return nil, err
@ -496,11 +413,8 @@ func (w *worker) sign(data []byte) (string, error) {
sig.Data, constants.PGPSignatureHeader, "", "") sig.Data, constants.PGPSignatureHeader, "", "")
} }
func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error { func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) error {
label := "unknown" label := strings.ToLower(string(tlpLabel))
if tlpLabel != nil {
label = strings.ToLower(string(*tlpLabel))
}
summaries := w.summaries[label] summaries := w.summaries[label]
@ -514,7 +428,7 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
yearDirs := make(map[int]string) yearDirs := make(map[int]string)
for _, file := range files { for _, file := range files {
u, err := url.Parse(file) u, err := url.Parse(file.URL())
if err != nil { if err != nil {
log.Printf("error: %s\n", err) log.Printf("error: %s\n", err)
continue continue
@ -539,7 +453,7 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
return json.NewDecoder(tee).Decode(&advisory) return json.NewDecoder(tee).Decode(&advisory)
} }
if err := downloadJSON(w.client, file, download); err != nil { if err := downloadJSON(w.client, file.URL(), download); err != nil {
log.Printf("error: %v\n", err) log.Printf("error: %v\n", err)
continue continue
} }
@ -578,7 +492,7 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
summaries = append(summaries, summary{ summaries = append(summaries, summary{
filename: filename, filename: filename,
summary: sum, summary: sum,
url: file, url: file.URL(),
}) })
year := sum.InitialReleaseDate.Year() year := sum.InitialReleaseDate.Year()
@ -604,7 +518,7 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
} }
// Try to fetch signature file. // Try to fetch signature file.
sigURL := file + ".asc" sigURL := file.SignURL()
ascFile := fname + ".asc" ascFile := fname + ".asc"
if err := w.downloadSignatureOrSign(sigURL, ascFile, data); err != nil { if err := w.downloadSignatureOrSign(sigURL, ascFile, data); err != nil {
return err return err

View file

@ -1,28 +0,0 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"log"
"net/url"
)
// resolveURLs resolves a list of URLs urls against a base URL base.
func resolveURLs(urls []string, base *url.URL) []string {
out := make([]string, 0, len(urls))
for _, u := range urls {
p, err := url.Parse(u)
if err != nil {
log.Printf("error: Invalid URL '%s': %v\n", u, err)
continue
}
out = append(out, base.ResolveReference(p).String())
}
return out
}

View file

@ -26,7 +26,12 @@ type (
) )
func (pgs pages) listed(path string, pro *processor) (bool, error) { func (pgs pages) listed(path string, pro *processor) (bool, error) {
base, err := util.BaseURL(path) pathURL, err := url.Parse(path)
if err != nil {
return false, err
}
base, err := util.BaseURL(pathURL)
if err != nil { if err != nil {
return false, err return false, err
} }

View file

@ -38,7 +38,7 @@ type options struct {
func errCheck(err error) { func errCheck(err error) {
if err != nil { if err != nil {
if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp { if flags.WroteHelp(err) {
os.Exit(0) os.Exit(0)
} }
log.Fatalf("error: %v\n", err) log.Fatalf("error: %v\n", err)

View file

@ -329,44 +329,8 @@ func (p *processor) httpClient() util.Client {
var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`) var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`)
// checkFile constructs the urls of a remote file.
type checkFile interface {
url() string
sha256() string
sha512() string
sign() string
}
// stringFile is a simple implementation of checkFile.
// The hash and signature files are directly constructed by extending
// the file name.
type stringFile string
func (sf stringFile) url() string { return string(sf) }
func (sf stringFile) sha256() string { return string(sf) + ".sha256" }
func (sf stringFile) sha512() string { return string(sf) + ".sha512" }
func (sf stringFile) sign() string { return string(sf) + ".asc" }
// hashFile is a more involed version of checkFile.
// Here each component can be given explicitly.
// If a component is not given it is constructed by
// extending the first component.
type hashFile [4]string
func (hf hashFile) name(i int, ext string) string {
if hf[i] != "" {
return hf[i]
}
return hf[0] + ext
}
func (hf hashFile) url() string { return hf[0] }
func (hf hashFile) sha256() string { return hf.name(1, ".sha256") }
func (hf hashFile) sha512() string { return hf.name(2, ".sha512") }
func (hf hashFile) sign() string { return hf.name(3, ".asc") }
func (p *processor) integrity( func (p *processor) integrity(
files []checkFile, files []csaf.AdvisoryFile,
base string, base string,
mask whereType, mask whereType,
lg func(MessageType, string, ...interface{}), lg func(MessageType, string, ...interface{}),
@ -380,7 +344,7 @@ func (p *processor) integrity(
var data bytes.Buffer var data bytes.Buffer
for _, f := range files { for _, f := range files {
fp, err := url.Parse(f.url()) fp, err := url.Parse(f.URL())
if err != nil { if err != nil {
lg(ErrorType, "Bad URL %s: %v", f, err) lg(ErrorType, "Bad URL %s: %v", f, err)
continue continue
@ -452,8 +416,8 @@ func (p *processor) integrity(
url func() string url func() string
hash []byte hash []byte
}{ }{
{"SHA256", f.sha256, s256.Sum(nil)}, {"SHA256", f.SHA256URL, s256.Sum(nil)},
{"SHA512", f.sha512, s512.Sum(nil)}, {"SHA512", f.SHA512URL, s512.Sum(nil)},
} { } {
hu, err := url.Parse(x.url()) hu, err := url.Parse(x.url())
if err != nil { if err != nil {
@ -490,9 +454,9 @@ func (p *processor) integrity(
} }
// Check signature // Check signature
su, err := url.Parse(f.sign()) su, err := url.Parse(f.SignURL())
if err != nil { if err != nil {
lg(ErrorType, "Bad URL %s: %v", f.sign(), err) lg(ErrorType, "Bad URL %s: %v", f.SignURL(), err)
continue continue
} }
sigFile := b.ResolveReference(su).String() sigFile := b.ResolveReference(su).String()
@ -585,14 +549,20 @@ func (p *processor) processROLIEFeed(feed string) error {
} }
} }
base, err := util.BaseURL(feed) feedURL, err := url.Parse(feed)
if err != nil {
p.badProviderMetadata.error("Bad base path: %v", err)
return errContinue
}
base, err := util.BaseURL(feedURL)
if err != nil { if err != nil {
p.badProviderMetadata.error("Bad base path: %v", err) p.badProviderMetadata.error("Bad base path: %v", err)
return errContinue return errContinue
} }
// Extract the CSAF files from feed. // Extract the CSAF files from feed.
var files []checkFile var files []csaf.AdvisoryFile
rfeed.Entries(func(entry *csaf.Entry) { rfeed.Entries(func(entry *csaf.Entry) {
@ -636,12 +606,12 @@ func (p *processor) processROLIEFeed(feed string) error {
return return
} }
var file checkFile var file csaf.AdvisoryFile
if sha256 != "" || sha512 != "" || sign != "" { if sha256 != "" || sha512 != "" || sign != "" {
file = hashFile{url, sha256, sha512, sign} file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign}
} else { } else {
file = stringFile(url) file = csaf.PlainAdvisoryFile(url)
} }
files = append(files, file) files = append(files, file)
@ -688,12 +658,12 @@ func (p *processor) checkIndex(base string, mask whereType) error {
return errContinue return errContinue
} }
files, err := func() ([]checkFile, error) { files, err := func() ([]csaf.AdvisoryFile, error) {
defer res.Body.Close() defer res.Body.Close()
var files []checkFile var files []csaf.AdvisoryFile
scanner := bufio.NewScanner(res.Body) scanner := bufio.NewScanner(res.Body)
for scanner.Scan() { for scanner.Scan() {
files = append(files, stringFile(scanner.Text())) files = append(files, csaf.PlainAdvisoryFile(scanner.Text()))
} }
return files, scanner.Err() return files, scanner.Err()
}() }()
@ -730,10 +700,10 @@ func (p *processor) checkChanges(base string, mask whereType) error {
return errContinue return errContinue
} }
times, files, err := func() ([]time.Time, []checkFile, error) { times, files, err := func() ([]time.Time, []csaf.AdvisoryFile, error) {
defer res.Body.Close() defer res.Body.Close()
var times []time.Time var times []time.Time
var files []checkFile var files []csaf.AdvisoryFile
c := csv.NewReader(res.Body) c := csv.NewReader(res.Body)
for { for {
r, err := c.Read() r, err := c.Read()
@ -750,7 +720,7 @@ func (p *processor) checkChanges(base string, mask whereType) error {
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
times, files = append(times, t), append(files, stringFile(r[1])) times, files = append(times, t), append(files, csaf.PlainAdvisoryFile(r[1]))
} }
return times, files, nil return times, files, nil
}() }()
@ -817,7 +787,11 @@ func (p *processor) checkCSAFs(domain string) error {
} }
// No rolie feeds // No rolie feeds
base, err := util.BaseURL(p.pmdURL) pmdURL, err := url.Parse(p.pmdURL)
if err != nil {
return err
}
base, err := util.BaseURL(pmdURL)
if err != nil { if err != nil {
return err return err
} }

View file

@ -0,0 +1,484 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"bytes"
"crypto/sha256"
"crypto/sha512"
"crypto/tls"
"encoding/json"
"fmt"
"hash"
"io"
"log"
"net/http"
"net/url"
"os"
"path"
"path/filepath"
"strings"
"github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
"golang.org/x/time/rate"
)
// downloader holds the state of one download run: the (lazily
// constructed) HTTP client, the parsed command line options, the
// target directory and the OpenPGP key rings used to verify
// advisory signatures.
type downloader struct {
	client    util.Client
	opts      *options
	directory string
	keys      []*crypto.KeyRing
}
// httpClient lazily constructs and caches the HTTP client used for
// all downloads, honoring the --insecure, --verbose and --rate
// command line options.
func (d *downloader) httpClient() util.Client {
	// Reuse an already constructed client.
	if d.client != nil {
		return d.client
	}
	hClient := http.Client{}
	var tlsConfig tls.Config
	// --insecure: skip TLS certificate verification.
	if d.opts.Insecure {
		tlsConfig.InsecureSkipVerify = true
		hClient.Transport = &http.Transport{
			TLSClientConfig: &tlsConfig,
		}
	}
	var client util.Client
	// --verbose: wrap the client so every request is logged.
	if d.opts.Verbose {
		client = &util.LoggingClient{Client: &hClient}
	} else {
		client = &hClient
	}
	// No rate limit requested: cache and return the client as is.
	if d.opts.Rate == nil {
		d.client = client
		return client
	}
	// --rate: additionally wrap the client with a request rate limiter.
	d.client = &util.LimitingClient{
		Client:  client,
		Limiter: rate.NewLimiter(rate.Limit(*d.opts.Rate), 1),
	}
	return d.client
}
// loadProviderMetadataDirectly fetches a provider-metadata.json from
// the given URL, validates it against the provider metadata schema
// and returns it together with the URL it was loaded from.
// On any fetch, decode or validation problem the reason is logged
// and nil is returned.
func (d *downloader) loadProviderMetadataDirectly(path string) *csaf.LoadedProviderMetadata {
	client := d.httpClient()
	resp, err := client.Get(path)
	if err != nil {
		log.Printf("Error fetching '%s': %v\n", path, err)
		return nil
	}
	// Close the body in every case, including non-200 responses,
	// so the underlying connection is not leaked.
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		log.Printf(
			"Error fetching '%s': %s (%d)\n", path, resp.Status, resp.StatusCode)
		return nil
	}
	var doc interface{}
	if err := json.NewDecoder(resp.Body).Decode(&doc); err != nil {
		log.Printf("Decoding '%s' as JSON failed: %v\n", path, err)
		return nil
	}
	errors, err := csaf.ValidateProviderMetadata(doc)
	if err != nil {
		log.Printf("Schema validation of '%s' failed: %v\n", path, err)
		return nil
	}
	if len(errors) > 0 {
		log.Printf(
			"Schema validation of '%s' leads to %d issues.\n", path, len(errors))
		return nil
	}
	return &csaf.LoadedProviderMetadata{
		Document: doc,
		URL:      path,
	}
}
// download fetches the provider metadata for the given domain (or
// directly from the given URL if domain starts with "https://"),
// loads the advertised OpenPGP keys and then downloads all
// advisories listed by the provider.
func (d *downloader) download(domain string) error {
	var lpmd *csaf.LoadedProviderMetadata
	// A full URL skips the domain based discovery.
	if strings.HasPrefix(domain, "https://") {
		lpmd = d.loadProviderMetadataDirectly(domain)
	} else {
		lpmd = csaf.LoadProviderMetadataForDomain(
			d.httpClient(), domain, func(format string, args ...interface{}) {
				log.Printf(
					"Looking for provider-metadata.json of '"+domain+"': "+format+"\n", args...)
			})
	}
	if lpmd == nil {
		return fmt.Errorf("no provider-metadata.json found for '%s'", domain)
	}
	// Relative URLs inside the metadata are resolved against the
	// URL the metadata itself was loaded from.
	base, err := url.Parse(lpmd.URL)
	if err != nil {
		return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err)
	}
	eval := util.NewPathEval()
	// Load the keys first so signatures can be checked while downloading.
	if err := d.loadOpenPGPKeys(
		d.httpClient(),
		eval,
		lpmd.Document,
		base,
	); err != nil {
		return err
	}
	afp := csaf.NewAdvisoryFileProcessor(
		d.httpClient(),
		eval,
		lpmd.Document,
		base)
	return afp.Process(d.downloadFiles)
}
// loadOpenPGPKeys extracts the public_openpgp_keys entries from the
// provider metadata document, fetches each referenced key, checks
// its fingerprint against the one advertised in the metadata and
// appends a key ring per successfully loaded key to d.keys.
// Problems with individual keys are logged and skipped; only a
// re-marshalling error aborts with a non-nil error.
func (d *downloader) loadOpenPGPKeys(
	client util.Client,
	eval *util.PathEval,
	doc interface{},
	base *url.URL,
) error {
	src, err := eval.Eval("$.public_openpgp_keys", doc)
	if err != nil {
		// no keys.
		return nil
	}
	var keys []csaf.PGPKey
	if err := util.ReMarshalJSON(&keys, src); err != nil {
		return err
	}
	if len(keys) == 0 {
		return nil
	}
	// Try to load
	for i := range keys {
		key := &keys[i]
		if key.URL == nil {
			continue
		}
		up, err := url.Parse(*key.URL)
		if err != nil {
			log.Printf("Invalid URL '%s': %v", *key.URL, err)
			continue
		}
		// Resolve possibly relative key URLs against the metadata base URL.
		u := base.ResolveReference(up).String()
		res, err := client.Get(u)
		if err != nil {
			log.Printf("Fetching public OpenPGP key %s failed: %v.", u, err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			log.Printf("Fetching public OpenPGP key %s status code: %d (%s)",
				u, res.StatusCode, res.Status)
			continue
		}
		// Parse the armored key, closing the body as soon as possible.
		ckey, err := func() (*crypto.Key, error) {
			defer res.Body.Close()
			return crypto.NewKeyFromArmoredReader(res.Body)
		}()
		if err != nil {
			log.Printf("Reading public OpenPGP key %s failed: %v", u, err)
			continue
		}
		// Reject keys whose fingerprint does not match the metadata.
		if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) {
			log.Printf(
				"Fingerprint of public OpenPGP key %s does not match remotely loaded.", u)
			continue
		}
		keyring, err := crypto.NewKeyRing(ckey)
		if err != nil {
			log.Printf("Creating store for public OpenPGP key %s failed: %v.", u, err)
			continue
		}
		d.keys = append(d.keys, keyring)
	}
	return nil
}
// downloadFiles downloads all given advisory files into a
// subdirectory of d.directory named after the TLP label.
// For each advisory it compares the remote SHA256/SHA512 checksums
// and the OpenPGP signature when those are available, validates the
// document against the CSAF schema, and stores the hash and
// signature files next to the advisory.
// Invalid or unverifiable advisories are logged and skipped; only
// local filesystem errors abort the whole run.
func (d *downloader) downloadFiles(label csaf.TLPLabel, files []csaf.AdvisoryFile) error {
	client := d.httpClient()
	var data bytes.Buffer
	// Remembers the last created directory to avoid repeated MkdirAll calls.
	var lastDir string
	for _, file := range files {
		u, err := url.Parse(file.URL())
		if err != nil {
			log.Printf("Ignoring invalid URL: %s: %v\n", file.URL(), err)
			continue
		}
		// Skip filenames that do not conform to the expected naming scheme.
		filename := filepath.Base(u.Path)
		if !util.ConfirmingFileName(filename) {
			log.Printf("Not confirming filename %q. Ignoring.\n", filename)
			continue
		}
		resp, err := client.Get(file.URL())
		if err != nil {
			log.Printf("WARN: cannot get '%s': %v\n", file.URL(), err)
			continue
		}
		if resp.StatusCode != http.StatusOK {
			log.Printf("WARN: cannot load %s: %s (%d)\n",
				file.URL(), resp.Status, resp.StatusCode)
			continue
		}
		var (
			writers                    []io.Writer
			s256, s512                 hash.Hash
			s256Data, s512Data         []byte
			remoteSHA256, remoteSHA512 []byte
			signData                   []byte
		)
		// Only hash when we have a remote counter part we can compare it with.
		if remoteSHA256, s256Data, err = d.loadHash(file.SHA256URL()); err != nil {
			if d.opts.Verbose {
				log.Printf("WARN: cannot fetch %s: %v\n", file.SHA256URL(), err)
			}
		} else {
			s256 = sha256.New()
			writers = append(writers, s256)
		}
		if remoteSHA512, s512Data, err = d.loadHash(file.SHA512URL()); err != nil {
			if d.opts.Verbose {
				log.Printf("WARN: cannot fetch %s: %v\n", file.SHA512URL(), err)
			}
		} else {
			s512 = sha512.New()
			writers = append(writers, s512)
		}
		// Remember the data as we need to store it to file later.
		data.Reset()
		writers = append(writers, &data)
		// Download the advisory and hash it.
		hasher := io.MultiWriter(writers...)
		var doc interface{}
		if err := func() error {
			defer resp.Body.Close()
			tee := io.TeeReader(resp.Body, hasher)
			return json.NewDecoder(tee).Decode(&doc)
		}(); err != nil {
			log.Printf("Downloading %s failed: %v", file.URL(), err)
			continue
		}
		// Compare the checksums.
		if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) {
			log.Printf("SHA256 checksum of %s does not match.\n", file.URL())
			continue
		}
		if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) {
			log.Printf("SHA512 checksum of %s does not match.\n", file.URL())
			continue
		}
		// Only check signature if we have loaded keys.
		if len(d.keys) > 0 {
			var sign *crypto.PGPSignature
			sign, signData, err = d.loadSignature(file.SignURL())
			if err != nil {
				// A missing signature is only reported in verbose mode.
				if d.opts.Verbose {
					log.Printf("downloading signature '%s' failed: %v\n",
						file.SignURL(), err)
				}
			}
			if sign != nil {
				if !d.checkSignature(data.Bytes(), sign) {
					log.Printf("Cannot verify signature for %s\n", file.URL())
					continue
				}
			}
		}
		// Validate against CSAF schema.
		errors, err := csaf.ValidateCSAF(doc)
		if err != nil {
			log.Printf("Failed to validate %s: %v", file.URL(), err)
			continue
		}
		if len(errors) > 0 {
			log.Printf("CSAF file %s has %d validation errors.", file.URL(), len(errors))
			continue
		}
		// Write advisory to file
		newDir := path.Join(d.directory, string(label))
		if newDir != lastDir {
			if err := os.MkdirAll(newDir, 0755); err != nil {
				return err
			}
			lastDir = newDir
		}
		// NOTE(review): this local variable shadows the imported "path"
		// package for the rest of the loop body; consider renaming it.
		path := filepath.Join(lastDir, filename)
		if err := os.WriteFile(path, data.Bytes(), 0644); err != nil {
			return err
		}
		// Write hash sums.
		if s256Data != nil {
			if err := os.WriteFile(path+".sha256", s256Data, 0644); err != nil {
				return err
			}
		}
		if s512Data != nil {
			if err := os.WriteFile(path+".sha512", s512Data, 0644); err != nil {
				return err
			}
		}
		// Write signature.
		if signData != nil {
			if err := os.WriteFile(path+".asc", signData, 0644); err != nil {
				return err
			}
		}
		log.Printf("Written advisory '%s'.\n", path)
	}
	return nil
}
// checkSignature reports whether sign is a valid detached signature
// of data under any of the loaded key rings.
func (d *downloader) checkSignature(data []byte, sign *crypto.PGPSignature) bool {
	msg := crypto.NewPlainMessage(data)
	now := crypto.GetUnixTime()
	for _, keyRing := range d.keys {
		if keyRing.VerifyDetached(msg, sign, now) == nil {
			return true
		}
	}
	return false
}
// loadSignature fetches an armored OpenPGP signature from URL p and
// returns the parsed signature together with its raw bytes so the
// caller can both verify and store it.
func (d *downloader) loadSignature(p string) (*crypto.PGPSignature, []byte, error) {
	resp, err := d.httpClient().Get(p)
	if err != nil {
		return nil, nil, err
	}
	// Close the body in every case, including non-200 responses,
	// so the underlying connection is not leaked.
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, nil, fmt.Errorf(
			"fetching signature from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode)
	}
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, nil, err
	}
	sign, err := crypto.NewPGPSignatureFromArmored(string(data))
	if err != nil {
		return nil, nil, err
	}
	return sign, data, nil
}
// loadHash fetches a checksum file (e.g. ".sha256"/".sha512") from
// URL p. It returns the decoded hash value and the raw file content
// so the caller can store the original file next to the advisory.
func (d *downloader) loadHash(p string) ([]byte, []byte, error) {
	resp, err := d.httpClient().Get(p)
	if err != nil {
		return nil, nil, err
	}
	// Close the body in every case, including non-200 responses,
	// so the underlying connection is not leaked.
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, nil, fmt.Errorf(
			"fetching hash from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode)
	}
	// Tee the body so the raw bytes are kept while parsing the hash.
	var data bytes.Buffer
	tee := io.TeeReader(resp.Body, &data)
	hash, err := util.HashFromReader(tee)
	if err != nil {
		return nil, nil, err
	}
	return hash, data.Bytes(), nil
}
// prepareDirectory ensures that the working directory
// exists and is set up properly.
func (d *downloader) prepareDirectory() error {
	// Without an explicit directory fall back to the current working directory.
	if d.opts.Directory == nil {
		cwd, err := os.Getwd()
		if err != nil {
			return err
		}
		d.directory = cwd
		return nil
	}
	// Use the given directory, creating it when it does not exist yet.
	dir := *d.opts.Directory
	switch _, err := os.Stat(dir); {
	case err == nil:
		// Already there, nothing to do.
	case os.IsNotExist(err):
		if err := os.MkdirAll(dir, 0755); err != nil {
			return err
		}
	default:
		return err
	}
	d.directory = dir
	return nil
}
// run performs the downloads for all the given domains.
// The working directory is prepared once up front; the first
// failing domain aborts the whole run.
func (d *downloader) run(domains []string) error {
	if err := d.prepareDirectory(); err != nil {
		return err
	}
	for _, domain := range domains {
		if err := d.download(domain); err != nil {
			return err
		}
	}
	return nil
}

View file

@ -0,0 +1,59 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"fmt"
"log"
"os"
"github.com/csaf-poc/csaf_distribution/util"
"github.com/jessevdk/go-flags"
)
// options holds the parsed command line options of the downloader.
// The descriptions in the struct tags are shown in the --help output.
type options struct {
	Directory *string  `short:"d" long:"directory" description:"Directory to store the downloaded files in"`
	Insecure  bool     `long:"insecure" description:"Do not check TLS certificates from provider"`
	Version   bool     `long:"version" description:"Display version of the binary"`
	Verbose   bool     `long:"verbose" short:"v" description:"Verbose output"`
	Rate      *float64 `long:"rate" short:"r" description:"The average upper limit of https operations per second"`
}
// errCheck terminates the program with a log message if err is
// non-nil. A help request from the flags parser exits with code 0.
func errCheck(err error) {
	if err != nil {
		if flags.WroteHelp(err) {
			os.Exit(0)
		}
		log.Fatalf("error: %v\n", err)
	}
}
// main parses the command line, handles --version and runs the
// downloader for all remaining (positional) domain arguments.
func main() {
	opts := new(options)
	parser := flags.NewParser(opts, flags.Default)
	parser.Usage = "[OPTIONS] domain..."
	domains, err := parser.Parse()
	errCheck(err)
	if opts.Version {
		fmt.Println(util.SemVersion)
		return
	}
	if len(domains) == 0 {
		log.Println("No domains given.")
		return
	}
	d := downloader{opts: opts}
	errCheck(d.run(domains))
}

View file

@ -355,7 +355,7 @@ func readInteractive(prompt string, pw **string) error {
func check(err error) { func check(err error) {
if err != nil { if err != nil {
if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp { if flags.WroteHelp(err) {
os.Exit(0) os.Exit(0)
} }
log.Fatalf("error: %v\n", err) log.Fatalf("error: %v\n", err)

274
csaf/advisories.go Normal file
View file

@ -0,0 +1,274 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package csaf
import (
"bufio"
"log"
"net/http"
"net/url"
"strings"
"github.com/csaf-poc/csaf_distribution/util"
)
// AdvisoryFile constructs the URLs of a remote advisory file
// and its accompanying hash and signature files.
type AdvisoryFile interface {
	URL() string
	SHA256URL() string
	SHA512URL() string
	SignURL() string
}
// PlainAdvisoryFile is a simple implementation of AdvisoryFile.
// The hash and signature URLs are derived by appending the
// conventional file extensions to the advisory URL itself.
type PlainAdvisoryFile string

// URL returns the URL of this advisory.
func (f PlainAdvisoryFile) URL() string { return string(f) }

// SHA256URL returns the URL of the SHA256 hash file of this advisory.
func (f PlainAdvisoryFile) SHA256URL() string { return f.URL() + ".sha256" }

// SHA512URL returns the URL of the SHA512 hash file of this advisory.
func (f PlainAdvisoryFile) SHA512URL() string { return f.URL() + ".sha512" }

// SignURL returns the URL of the signature file of this advisory.
func (f PlainAdvisoryFile) SignURL() string { return f.URL() + ".asc" }
// HashedAdvisoryFile is a more involved implementation of AdvisoryFile.
// Here each component can be given explicitly.
// If a component is not given it is constructed by
// extending the first component.
type HashedAdvisoryFile [4]string

// name returns component i, falling back to the first component
// extended by ext when component i is empty.
func (f HashedAdvisoryFile) name(i int, ext string) string {
	if f[i] == "" {
		return f[0] + ext
	}
	return f[i]
}

// URL returns the URL of this advisory.
func (f HashedAdvisoryFile) URL() string { return f[0] }

// SHA256URL returns the URL of the SHA256 hash file of this advisory.
func (f HashedAdvisoryFile) SHA256URL() string { return f.name(1, ".sha256") }

// SHA512URL returns the URL of the SHA512 hash file of this advisory.
func (f HashedAdvisoryFile) SHA512URL() string { return f.name(2, ".sha512") }

// SignURL returns the URL of the signature file of this advisory.
func (f HashedAdvisoryFile) SignURL() string { return f.name(3, ".asc") }
// AdvisoryFileProcessor implements the extraction of
// advisory file names from a given provider metadata.
type AdvisoryFileProcessor struct {
	client util.Client
	expr   *util.PathEval
	doc    interface{}
	base   *url.URL
}

// NewAdvisoryFileProcessor constructs a filename extractor
// for a given metadata document.
// base is the URL the metadata was loaded from; relative
// URLs found in the document are resolved against it.
func NewAdvisoryFileProcessor(
	client util.Client,
	expr *util.PathEval,
	doc interface{},
	base *url.URL,
) *AdvisoryFileProcessor {
	return &AdvisoryFileProcessor{
		client: client,
		expr:   expr,
		doc:    doc,
		base:   base,
	}
}
// Process extracts the advisory filenames and passes them with
// the corresponding label to fn.
// If the metadata advertises ROLIE feeds those are used; otherwise
// the advisories are loaded from index.txt and reported as TLP:WHITE.
func (afp *AdvisoryFileProcessor) Process(fn func(TLPLabel, []AdvisoryFile) error) error {
	// Check if we have ROLIE feeds.
	rolie, err := afp.expr.Eval(
		"$.distributions[*].rolie.feeds", afp.doc)
	if err != nil {
		log.Printf("rolie check failed: %v\n", err)
		return err
	}
	fs, hasRolie := rolie.([]interface{})
	hasRolie = hasRolie && len(fs) > 0
	if hasRolie {
		var feeds [][]Feed
		if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
			return err
		}
		log.Printf("Found %d ROLIE feed(s).\n", len(feeds))
		for _, feed := range feeds {
			if err := afp.processROLIE(feed, fn); err != nil {
				return err
			}
		}
	} else {
		// No rolie feeds -> try to load files from index.txt
		files, err := afp.loadIndex()
		if err != nil {
			return err
		}
		// XXX: Is treating as white okay? better look into the advisories?
		if err := fn(TLPLabelWhite, files); err != nil {
			return err
		}
	} // TODO: else scan directories?
	return nil
}
// loadIndex loads baseURL/index.txt and returns a list of files
// prefixed by baseURL/.
func (afp *AdvisoryFileProcessor) loadIndex() ([]AdvisoryFile, error) {
	base, err := util.BaseURL(afp.base)
	if err != nil {
		return nil, err
	}
	resp, err := afp.client.Get(base + "/index.txt")
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// NOTE(review): resp.StatusCode is not checked here; a 404 error
	// page would be scanned as if it were an index. Consider checking
	// for http.StatusOK like processROLIE does.
	var files []AdvisoryFile
	lines := bufio.NewScanner(resp.Body)
	for lines.Scan() {
		files = append(files, PlainAdvisoryFile(base+"/"+lines.Text()))
	}
	if err := lines.Err(); err != nil {
		return nil, err
	}
	return files, nil
}
// processROLIE downloads and parses each of the given ROLIE feeds and
// calls fn once per feed with the feed's TLP label ("unknown" if the
// feed has none) and the advisory files found in its entries.
// Problems with a single feed are logged and the feed is skipped;
// only an error returned by fn aborts the processing.
func (afp *AdvisoryFileProcessor) processROLIE(
	labeledFeeds []Feed,
	fn func(TLPLabel, []AdvisoryFile) error,
) error {
	for i := range labeledFeeds {
		feed := &labeledFeeds[i]
		if feed.URL == nil {
			continue
		}
		up, err := url.Parse(string(*feed.URL))
		if err != nil {
			log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err)
			continue
		}
		// Feed URLs may be relative to the provider metadata document.
		feedURL := afp.base.ResolveReference(up)
		log.Printf("Feed URL: %s\n", feedURL)
		// The feed's base URL is used below to resolve relative entry links.
		fb, err := util.BaseURL(feedURL)
		if err != nil {
			log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err)
			continue
		}
		feedBaseURL, err := url.Parse(fb)
		if err != nil {
			log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err)
			continue
		}
		res, err := afp.client.Get(feedURL.String())
		if err != nil {
			// NOTE(review): the '%s' verb formats the error here, not the URL.
			log.Printf("error: Cannot get feed '%s'\n", err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			log.Printf("error: Fetching %s failed. Status code %d (%s)",
				feedURL, res.StatusCode, res.Status)
			continue
		}
		// Parse the feed in a closure so the body is closed right after.
		rfeed, err := func() (*ROLIEFeed, error) {
			defer res.Body.Close()
			return LoadROLIEFeed(res.Body)
		}()
		if err != nil {
			log.Printf("Loading ROLIE feed failed: %v.", err)
			continue
		}
		var files []AdvisoryFile
		// resolve turns a possibly relative link into an absolute URL;
		// it returns "" for empty or unparsable input.
		resolve := func(u string) string {
			if u == "" {
				return ""
			}
			p, err := url.Parse(u)
			if err != nil {
				log.Printf("error: Invalid URL '%s': %v", u, err)
				return ""
			}
			return feedBaseURL.ResolveReference(p).String()
		}
		// Collect the advisory, hash and signature URLs of each entry.
		rfeed.Entries(func(entry *Entry) {
			var self, sha256, sha512, sign string
			for i := range entry.Link {
				link := &entry.Link[i]
				lower := strings.ToLower(link.HRef)
				switch link.Rel {
				case "self":
					self = resolve(link.HRef)
				case "signature":
					sign = resolve(link.HRef)
				case "hash":
					// The hash algorithm is determined by the file extension.
					switch {
					case strings.HasSuffix(lower, ".sha256"):
						sha256 = resolve(link.HRef)
					case strings.HasSuffix(lower, ".sha512"):
						sha512 = resolve(link.HRef)
					}
				}
			}
			// Entries without a "self" link carry no advisory.
			if self == "" {
				return
			}
			var file AdvisoryFile
			// Only use the four-component form if at least one
			// companion file was given explicitly.
			if sha256 != "" || sha512 != "" || sign != "" {
				file = HashedAdvisoryFile{self, sha256, sha512, sign}
			} else {
				file = PlainAdvisoryFile(self)
			}
			files = append(files, file)
		})
		var label TLPLabel
		if feed.TLPLabel != nil {
			label = *feed.TLPLabel
		} else {
			label = "unknown"
		}
		if err := fn(label, files); err != nil {
			return err
		}
	}
	return nil
}

View file

@ -37,11 +37,11 @@ const (
) )
var tlpLabelPattern = alternativesUnmarshal( var tlpLabelPattern = alternativesUnmarshal(
string(TLPLabelUnlabeled), TLPLabelUnlabeled,
string(TLPLabelWhite), TLPLabelWhite,
string(TLPLabelGreen), TLPLabelGreen,
string(TLPLabelAmber), TLPLabelAmber,
string(TLPLabelRed), TLPLabelRed,
) )
// JSONURL is an URL to JSON document. // JSONURL is an URL to JSON document.

View file

@ -103,19 +103,6 @@ func (rf *ROLIEFeed) EntryByID(id string) *Entry {
return nil return nil
} }
// Files extracts the files from the feed.
func (rf *ROLIEFeed) Files(filter string) []string {
var files []string
for _, f := range rf.Feed.Entry {
for i := range f.Link {
if link := &f.Link[i]; link.Rel == filter {
files = append(files, link.HRef)
}
}
}
return files
}
// Entries visits the entries of this feed. // Entries visits the entries of this feed.
func (rf *ROLIEFeed) Entries(fn func(*Entry)) { func (rf *ROLIEFeed) Entries(fn func(*Entry)) {
for _, e := range rf.Feed.Entry { for _, e := range rf.Feed.Entry {

18
docs/csaf_downloader.md Normal file
View file

@ -0,0 +1,18 @@
## csaf_downloader
### Usage
```
Usage:
csaf_downloader [OPTIONS] domain...
Application Options:
-d, --directory= Directory to store the downloaded files in
--insecure Do not check TLS certificates from provider
--version Display version of the binary
-v, --verbose Verbose output
-r, --rate= The average upper limit of https operations per second
Help Options:
-h, --help Show this help message
```

View file

@ -25,4 +25,5 @@ Calling example (as root):
./TLSClientConfigsForITest.sh ./TLSClientConfigsForITest.sh
./setupProviderForITest.sh ./setupProviderForITest.sh
./testAggregator.sh ./testAggregator.sh
./testDownloader.sh
``` ```

25
docs/scripts/testDownloader.sh Executable file
View file

@ -0,0 +1,25 @@
#!/usr/bin/env bash
# This file is Free Software under the MIT License
# without warranty, see README.md and LICENSES/MIT.txt for details.
#
# SPDX-License-Identifier: MIT
#
# SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
# Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
set -e # to exit if a command in the script fails
echo
echo '==== run downloader'
cd ~/csaf_distribution
mkdir ~/downloaded1
./bin-linux-amd64/csaf_downloader --directory ../downloaded1 \
--rate 4.1 --verbose --insecure localhost
echo
echo '==== this was downloaded'
cd ~/downloaded1
find .

View file

@ -13,12 +13,8 @@ import (
"strings" "strings"
) )
// BaseURL returns the base URL for a given URL p. // BaseURL returns the base URL for a given URL.
func BaseURL(p string) (string, error) { func BaseURL(u *url.URL) (string, error) {
u, err := url.Parse(p)
if err != nil {
return "", err
}
ep := u.EscapedPath() ep := u.EscapedPath()
if idx := strings.LastIndexByte(ep, '/'); idx != -1 { if idx := strings.LastIndexByte(ep, '/'); idx != -1 {
ep = ep[:idx+1] ep = ep[:idx+1]