From fa434fa039013ba80ab044e476d1c9f9f6171e9d Mon Sep 17 00:00:00 2001
From: "Sascha L. Teichmann"
Date: Tue, 14 Jun 2022 13:41:51 +0200
Subject: [PATCH] Improve checker regarding ROLIE feed advisory URLs, hashes and signatures

* Add checking of the ROLIE feed advisory URLs, hashes and signatures.
---
 cmd/csaf_checker/processor.go | 128 ++++++++++++++++++++++++++++++----
 csaf/rolie.go                 |   7 ++
 2 files changed, 121 insertions(+), 14 deletions(-)

diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go
index 98cf5f7..585b58e 100644
--- a/cmd/csaf_checker/processor.go
+++ b/cmd/csaf_checker/processor.go
@@ -329,8 +329,44 @@ func (p *processor) httpClient() util.Client {
 }
 
 var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`)
 
+// checkFile provides the URLs of a remote file and of its hash and signature files.
+type checkFile interface {
+	url() string
+	sha256() string
+	sha512() string
+	sign() string
+}
+
+// stringFile is a simple implementation of checkFile.
+// The hash and signature URLs are constructed directly by extending
+// the file name.
+type stringFile string
+
+func (sf stringFile) url() string    { return string(sf) }
+func (sf stringFile) sha256() string { return string(sf) + ".sha256" }
+func (sf stringFile) sha512() string { return string(sf) + ".sha512" }
+func (sf stringFile) sign() string   { return string(sf) + ".asc" }
+
+// hashFile is a more involved implementation of checkFile.
+// Here each component can be given explicitly.
+// If a component is not given, it is constructed by
+// extending the first component.
+type hashFile [4]string
+
+func (hf hashFile) name(i int, ext string) string {
+	if hf[i] != "" {
+		return hf[i]
+	}
+	return hf[0] + ext
+}
+
+func (hf hashFile) url() string    { return hf[0] }
+func (hf hashFile) sha256() string { return hf.name(1, ".sha256") }
+func (hf hashFile) sha512() string { return hf.name(2, ".sha512") }
+func (hf hashFile) sign() string   { return hf.name(3, ".asc") }
+
 func (p *processor) integrity(
-	files []string,
+	files []checkFile,
 	base string, mask whereType,
 	lg func(MessageType, string, ...interface{}),
@@ -344,7 +380,7 @@ func (p *processor) integrity(
 	var data bytes.Buffer
 	for _, f := range files {
-		fp, err := url.Parse(f)
+		fp, err := url.Parse(f.url())
 		if err != nil {
 			lg(ErrorType, "Bad URL %s: %v", f, err)
 			continue
 		}
@@ -413,12 +449,18 @@ func (p *processor) integrity(
 
 		for _, x := range []struct {
 			ext  string
+			url  func() string
 			hash []byte
 		}{
-			{"sha256", s256.Sum(nil)},
-			{"sha512", s512.Sum(nil)},
+			{"SHA256", f.sha256, s256.Sum(nil)},
+			{"SHA512", f.sha512, s512.Sum(nil)},
 		} {
-			hashFile := u + "." + x.ext
+			hu, err := url.Parse(x.url())
+			if err != nil {
+				lg(ErrorType, "Bad URL %s: %v", x.url(), err)
+				continue
+			}
+			hashFile := b.ResolveReference(hu).String()
 			p.checkTLS(hashFile)
 			if res, err = client.Get(hashFile); err != nil {
 				p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err)
@@ -443,12 +485,17 @@ func (p *processor) integrity(
 			}
 			if !bytes.Equal(h, x.hash) {
 				p.badIntegrities.error("%s hash of %s does not match %s.",
-					strings.ToUpper(x.ext), u, hashFile)
+					x.ext, u, hashFile)
 			}
 		}
 
 		// Check signature
-		sigFile := u + ".asc"
+		su, err := url.Parse(f.sign())
+		if err != nil {
+			lg(ErrorType, "Bad URL %s: %v", f.sign(), err)
+			continue
+		}
+		sigFile := b.ResolveReference(su).String()
 		p.checkTLS(sigFile)
 
 		p.badSignatures.use()
@@ -545,7 +592,60 @@ func (p *processor) processROLIEFeed(feed string) error {
 	}
 
 	// Extract the CSAF files from feed.
-	files := rfeed.Files("self")
+	var files []checkFile
+
+	rfeed.Entries(func(entry *csaf.Entry) {
+
+		var url, sha256, sha512, sign string
+
+		for i := range entry.Link {
+			link := &entry.Link[i]
+			lower := strings.ToLower(link.HRef)
+			switch link.Rel {
+			case "self":
+				if !strings.HasSuffix(lower, ".json") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "self" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				url = link.HRef
+			case "signature":
+				if !strings.HasSuffix(lower, ".asc") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "signature" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				sign = link.HRef
+			case "hash":
+				switch {
+				case strings.HasSuffix(lower, "sha256"):
+					sha256 = link.HRef
+				case strings.HasSuffix(lower, "sha512"):
+					sha512 = link.HRef
+				default:
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "hash" has unsupported file extension.`,
+						link.HRef, feed)
+				}
+			}
+		}
+
+		if url == "" {
+			p.badProviderMetadata.warn(
+				`ROLIE feed %s contains an entry with no "self" link.`, feed)
+			return
+		}
+
+		var file checkFile
+
+		if sha256 != "" || sha512 != "" || sign != "" {
+			file = hashFile{url, sha256, sha512, sign}
+		} else {
+			file = stringFile(url)
+		}
+
+		files = append(files, file)
+	})
 
 	if err := p.integrity(files, base, rolieMask, p.badProviderMetadata.add);
 		err != nil && err != errContinue {
@@ -588,12 +688,12 @@ func (p *processor) checkIndex(base string, mask whereType) error {
 		return errContinue
 	}
 
-	files, err := func() ([]string, error) {
+	files, err := func() ([]checkFile, error) {
 		defer res.Body.Close()
-		var files []string
+		var files []checkFile
 		scanner := bufio.NewScanner(res.Body)
 		for scanner.Scan() {
-			files = append(files, scanner.Text())
+			files = append(files, stringFile(scanner.Text()))
 		}
 		return files, scanner.Err()
 	}()
@@ -630,10 +730,10 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 		return errContinue
 	}
 
-	times, files, err := func() ([]time.Time, []string, error) {
+	times, files, err := func() ([]time.Time, []checkFile, error) {
 		defer res.Body.Close()
 		var times []time.Time
-		var files []string
+		var files []checkFile
 		c := csv.NewReader(res.Body)
 		for {
 			r, err := c.Read()
@@ -650,7 +750,7 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 			if err != nil {
 				return nil, nil, err
 			}
-			times, files = append(times, t), append(files, r[1])
+			times, files = append(times, t), append(files, stringFile(r[1]))
 		}
 		return times, files, nil
 	}()
diff --git a/csaf/rolie.go b/csaf/rolie.go
index ac9c4c0..5478bf6 100644
--- a/csaf/rolie.go
+++ b/csaf/rolie.go
@@ -116,6 +116,13 @@ func (rf *ROLIEFeed) Files(filter string) []string {
 	return files
 }
 
+// Entries visits the entries of this feed.
+func (rf *ROLIEFeed) Entries(fn func(*Entry)) {
+	for _, e := range rf.Feed.Entry {
+		fn(e)
+	}
+}
+
 // SortEntriesByUpdated sorts all the entries in the feed
 // by their update times.
 func (rf *ROLIEFeed) SortEntriesByUpdated() {
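
For reviewers, a minimal standalone sketch of how the two checkFile implementations introduced above derive the companion hash and signature URLs. The package layout, main function and file names are illustrative only and not part of the patch.

package main

import "fmt"

// checkFile mirrors the interface added in processor.go: the advisory URL
// plus the URLs of its SHA-256, SHA-512 and OpenPGP signature files.
type checkFile interface {
    url() string
    sha256() string
    sha512() string
    sign() string
}

// stringFile derives the companion URLs by appending fixed extensions
// to the advisory URL, as done for checkIndex and checkChanges entries.
type stringFile string

func (sf stringFile) url() string    { return string(sf) }
func (sf stringFile) sha256() string { return string(sf) + ".sha256" }
func (sf stringFile) sha512() string { return string(sf) + ".sha512" }
func (sf stringFile) sign() string   { return string(sf) + ".asc" }

// hashFile carries explicitly given companion URLs (e.g. from ROLIE
// "hash" and "signature" links) and falls back to extending the
// advisory URL for any component that is empty.
type hashFile [4]string

func (hf hashFile) name(i int, ext string) string {
    if hf[i] != "" {
        return hf[i]
    }
    return hf[0] + ext
}

func (hf hashFile) url() string    { return hf[0] }
func (hf hashFile) sha256() string { return hf.name(1, ".sha256") }
func (hf hashFile) sha512() string { return hf.name(2, ".sha512") }
func (hf hashFile) sign() string   { return hf.name(3, ".asc") }

func main() {
    // Entry from an index or changes listing: everything is derived.
    var a checkFile = stringFile("2022/example-advisory.json")

    // ROLIE entry that only provided an explicit SHA-512 link; the
    // SHA-256 and signature URLs fall back to the derived names.
    var b checkFile = hashFile{"2022/example-advisory.json", "", "2022/example-advisory.json.sha512", ""}

    for _, f := range []checkFile{a, b} {
        fmt.Println(f.url(), f.sha256(), f.sha512(), f.sign())
    }
}

The interface lets integrity() treat plain index/changes entries and ROLIE entries with explicit hash and signature links uniformly.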
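
Also note that integrity() no longer appends ".sha256"/".sha512"/".asc" to the already-resolved document URL; it parses each companion URL and resolves it against the base, so hash and signature links taken from a ROLIE feed are resolved the same way as the advisory URLs themselves. A minimal sketch of that resolution step with net/url follows; the URLs are made up for illustration.

package main

import (
    "fmt"
    "net/url"
)

func main() {
    // Base URL, roughly as integrity() obtains it from its base parameter.
    base, err := url.Parse("https://example.com/.well-known/csaf/white/2022/")
    if err != nil {
        panic(err)
    }

    // A relative "hash" link as it could appear in a ROLIE feed entry.
    rel, err := url.Parse("example-advisory.json.sha256")
    if err != nil {
        panic(err)
    }

    // ResolveReference yields the absolute URL that is actually fetched:
    // https://example.com/.well-known/csaf/white/2022/example-advisory.json.sha256
    fmt.Println(base.ResolveReference(rel))
}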