Mirror of https://github.com/gocsaf/csaf.git (synced 2025-12-22 11:55:40 +01:00)
Improve checker regarding ROLIE feed advisory URLs, hashes and signatures
* Add checking of the ROLIE feed advisory URLs, hashes and signatures.
parent 86a6f9abde, commit fa434fa039
2 changed files with 121 additions and 14 deletions
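The core of the change is a small checkFile abstraction, added in the first hunk below: stringFile derives hash and signature URLs by appending ".sha256", ".sha512" and ".asc" to the advisory URL, while hashFile carries the URLs advertised by the ROLIE feed and only falls back to that extension scheme for components that were not given. The following is a rough, illustrative sketch of that behaviour; it assumes it sits in the same package as the new unexported types and that fmt is imported, the exampleCheckFiles helper is hypothetical, and all URLs are made up.

// exampleCheckFiles is a hypothetical helper (not part of this commit)
// that only demonstrates how the two checkFile implementations below
// resolve hash and signature locations.
func exampleCheckFiles() {
	// stringFile simply extends the advisory URL.
	var f checkFile = stringFile("https://example.com/2022/advisory.json")
	fmt.Println(f.sha256()) // https://example.com/2022/advisory.json.sha256
	fmt.Println(f.sign())   // https://example.com/2022/advisory.json.asc

	// hashFile keeps explicitly advertised URLs and extends the
	// advisory URL only for components that were not given.
	var g checkFile = hashFile{
		"https://example.com/2022/advisory.json",          // advisory URL
		"https://example.com/hashes/advisory.json.sha256", // explicit SHA256 URL
		"", // no SHA512 URL: falls back to advisory URL + ".sha512"
		"", // no signature URL: falls back to advisory URL + ".asc"
	}
	fmt.Println(g.sha256()) // https://example.com/hashes/advisory.json.sha256
	fmt.Println(g.sha512()) // https://example.com/2022/advisory.json.sha512
}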
@@ -329,8 +329,44 @@ func (p *processor) httpClient() util.Client {
 
 var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`)
 
+// checkFile constructs the urls of a remote file.
+type checkFile interface {
+	url() string
+	sha256() string
+	sha512() string
+	sign() string
+}
+
+// stringFile is a simple implementation of checkFile.
+// The hash and signature files are directly constructed by extending
+// the file name.
+type stringFile string
+
+func (sf stringFile) url() string    { return string(sf) }
+func (sf stringFile) sha256() string { return string(sf) + ".sha256" }
+func (sf stringFile) sha512() string { return string(sf) + ".sha512" }
+func (sf stringFile) sign() string   { return string(sf) + ".asc" }
+
+// hashFile is a more involed version of checkFile.
+// Here each component can be given explicitly.
+// If a component is not given it is constructed by
+// extending the first component.
+type hashFile [4]string
+
+func (hf hashFile) name(i int, ext string) string {
+	if hf[i] != "" {
+		return hf[i]
+	}
+	return hf[0] + ext
+}
+
+func (hf hashFile) url() string    { return hf[0] }
+func (hf hashFile) sha256() string { return hf.name(1, ".sha256") }
+func (hf hashFile) sha512() string { return hf.name(2, ".sha512") }
+func (hf hashFile) sign() string   { return hf.name(3, ".asc") }
+
 func (p *processor) integrity(
-	files []string,
+	files []checkFile,
 	base string,
 	mask whereType,
 	lg func(MessageType, string, ...interface{}),
@@ -344,7 +380,7 @@ func (p *processor) integrity(
 	var data bytes.Buffer
 
 	for _, f := range files {
-		fp, err := url.Parse(f)
+		fp, err := url.Parse(f.url())
 		if err != nil {
 			lg(ErrorType, "Bad URL %s: %v", f, err)
 			continue
@@ -413,12 +449,18 @@ func (p *processor) integrity(
 
 		for _, x := range []struct {
 			ext  string
+			url  func() string
 			hash []byte
 		}{
-			{"sha256", s256.Sum(nil)},
-			{"sha512", s512.Sum(nil)},
+			{"SHA256", f.sha256, s256.Sum(nil)},
+			{"SHA512", f.sha512, s512.Sum(nil)},
 		} {
-			hashFile := u + "." + x.ext
+			hu, err := url.Parse(x.url())
+			if err != nil {
+				lg(ErrorType, "Bad URL %s: %v", x.url(), err)
+				continue
+			}
+			hashFile := b.ResolveReference(hu).String()
 			p.checkTLS(hashFile)
 			if res, err = client.Get(hashFile); err != nil {
 				p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err)
@@ -443,12 +485,17 @@ func (p *processor) integrity(
 			}
 			if !bytes.Equal(h, x.hash) {
 				p.badIntegrities.error("%s hash of %s does not match %s.",
-					strings.ToUpper(x.ext), u, hashFile)
+					x.ext, u, hashFile)
 			}
 		}
 
 		// Check signature
-		sigFile := u + ".asc"
+		su, err := url.Parse(f.sign())
+		if err != nil {
+			lg(ErrorType, "Bad URL %s: %v", f.sign(), err)
+			continue
+		}
+		sigFile := b.ResolveReference(su).String()
 		p.checkTLS(sigFile)
 
 		p.badSignatures.use()
@@ -545,7 +592,60 @@ func (p *processor) processROLIEFeed(feed string) error {
 	}
 
 	// Extract the CSAF files from feed.
-	files := rfeed.Files("self")
+	var files []checkFile
+
+	rfeed.Entries(func(entry *csaf.Entry) {
+
+		var url, sha256, sha512, sign string
+
+		for i := range entry.Link {
+			link := &entry.Link[i]
+			lower := strings.ToLower(link.HRef)
+			switch link.Rel {
+			case "self":
+				if !strings.HasSuffix(lower, ".json") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "self" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				url = link.HRef
+			case "signature":
+				if !strings.HasSuffix(lower, ".asc") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "signature" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				sign = link.HRef
+			case "hash":
+				switch {
+				case strings.HasSuffix(lower, "sha256"):
+					sha256 = link.HRef
+				case strings.HasSuffix(lower, "sha512"):
+					sha512 = link.HRef
+				default:
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "hash" has unsupported file extension.`,
+						link.HRef, feed)
+				}
+			}
+		}
+
+		if url == "" {
+			p.badProviderMetadata.warn(
+				`ROLIE feed %s contains entry link with no "self" URL.`, feed)
+			return
+		}
+
+		var file checkFile
+
+		if sha256 != "" || sha512 != "" || sign != "" {
+			file = hashFile{url, sha256, sha512, sign}
+		} else {
+			file = stringFile(url)
+		}
+
+		files = append(files, file)
+	})
 
 	if err := p.integrity(files, base, rolieMask, p.badProviderMetadata.add); err != nil &&
 		err != errContinue {
@@ -588,12 +688,12 @@ func (p *processor) checkIndex(base string, mask whereType) error {
 		return errContinue
 	}
 
-	files, err := func() ([]string, error) {
+	files, err := func() ([]checkFile, error) {
 		defer res.Body.Close()
-		var files []string
+		var files []checkFile
 		scanner := bufio.NewScanner(res.Body)
 		for scanner.Scan() {
-			files = append(files, scanner.Text())
+			files = append(files, stringFile(scanner.Text()))
 		}
 		return files, scanner.Err()
 	}()
@@ -630,10 +730,10 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 		return errContinue
 	}
 
-	times, files, err := func() ([]time.Time, []string, error) {
+	times, files, err := func() ([]time.Time, []checkFile, error) {
 		defer res.Body.Close()
 		var times []time.Time
-		var files []string
+		var files []checkFile
 		c := csv.NewReader(res.Body)
 		for {
 			r, err := c.Read()
@@ -650,7 +750,7 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 			if err != nil {
 				return nil, nil, err
 			}
-			times, files = append(times, t), append(files, r[1])
+			times, files = append(times, t), append(files, stringFile(r[1]))
 		}
 		return times, files, nil
 	}()
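One detail worth noting in the integrity() hunks above: the hash and signature locations taken from a ROLIE feed may be relative or absolute, which is why they are parsed with url.Parse and resolved against the base URL via ResolveReference rather than concatenated as strings. A small standalone illustration of that net/url behaviour (the URLs are made up, not taken from the repository):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Base and references are invented for illustration only;
	// errors are ignored because the literals are known to parse.
	base, _ := url.Parse("https://example.com/.well-known/csaf/white/2022/")

	// A relative reference is resolved against the base ...
	rel, _ := url.Parse("advisory.json.sha512")
	fmt.Println(base.ResolveReference(rel))
	// https://example.com/.well-known/csaf/white/2022/advisory.json.sha512

	// ... while an absolute reference is returned unchanged.
	abs, _ := url.Parse("https://cdn.example.org/hashes/advisory.json.sha512")
	fmt.Println(base.ResolveReference(abs))
	// https://cdn.example.org/hashes/advisory.json.sha512
}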
@@ -116,6 +116,13 @@ func (rf *ROLIEFeed) Files(filter string) []string {
 	return files
 }
 
+// Entries visits the entries of this feed.
+func (rf *ROLIEFeed) Entries(fn func(*Entry)) {
+	for _, e := range rf.Feed.Entry {
+		fn(e)
+	}
+}
+
 // SortEntriesByUpdated sorts all the entries in the feed
 // by their update times.
 func (rf *ROLIEFeed) SortEntriesByUpdated() {
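For completeness, a minimal sketch of how a caller might use the new Entries visitor. The collectSelfLinks helper is hypothetical, and the Entry/Link field names are assumed from their usage in the checker hunk above:

// collectSelfLinks is a hypothetical example, not part of this commit.
// It gathers the "self" link of every entry in the feed.
func collectSelfLinks(rf *ROLIEFeed) []string {
	var urls []string
	rf.Entries(func(entry *Entry) {
		for i := range entry.Link {
			if entry.Link[i].Rel == "self" {
				urls = append(urls, entry.Link[i].HRef)
			}
		}
	})
	return urls
}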