Mirror of https://github.com/gocsaf/csaf.git (synced 2025-12-22 18:15:42 +01:00)
Simplified requirement 15 (#369)
* Add badROLIEfeed as topic message.
* Use badROLIEfeed to guarantee that the TLP labels WHITE, GREEN or unlabeled exist. (Test not implemented.)
* Syntax.
* Formatting.
* Add TLP check and completion struct.
* Add mismatch to completion; add function checkCompletion to fill mismatch and also give an error if invalid TLP levels have been used.
* Formatting.
* Add function to remove incomplete CSAF feeds from the list of complete CSAF feeds for a given TLP level.
* Add checkSummary function that checks whether a given feed would qualify as summary feed among all currently checked feeds.
* Add completed check of TLP levels.
* Add checks for correct hashes and signatures in ROLIE feed.
* Formatting.
* Add rolieFeedReporter functionality.
* Fix typo.
* Add todo; add return values to functions.
* Switch error return value so the error is returned last.
* Fix typo.
* Remove hash/signature checks that don't work; improve ROLIE message.
* Add handling for advisories without TLP level.
* Formatting.
* Clean up ROLIE checks.
* Started with simplifying ROLIE checking.
* Every ROLIE feed with data should have a summary.
* Clean up ROLIE feed label checker.
* If no TLP level can be extracted, return UNLABELED, not WHITE.
* Add handling of advisories whose TLP exists but has no label.
* Also check TLP:RED for completeness.
* Only remove an advisory from the remaining set when it has exactly the right TLP color.
* Fix import in new ROLIE feed checker.
* Update comment to reflect current functionality.
* Accept an advisory of lesser TLP color in a feed as completing.
* Collect advisory labels from advisories.
* Clarify that if no summary feed was found, it may exist but be either not listed or not accessible.
* Do not clone the advisory lookup beforehand.
* Move ROLIE check code to its respective file.

Co-authored-by: JanHoefelmeyer <jhoefelmeyer@intevation.de>
Co-authored-by: JanHoefelmeyer <hoefelmeyer.jan@gmail.com>
parent 3ff7e16569
commit 55540a32e0

3 changed files with 434 additions and 159 deletions
@@ -52,6 +52,7 @@ type processor struct {
 	pmd256 []byte
 	pmd    any
 	keys   *crypto.KeyRing
+	labelChecker *rolieLabelChecker

 	invalidAdvisories topicMessages
 	badFilenames      topicMessages
@@ -66,6 +67,7 @@ type processor struct {
 	badWellknownMetadata topicMessages
 	badDNSPath           topicMessages
 	badDirListings       topicMessages
+	badROLIEfeed         topicMessages

 	expr *util.PathEval
 }
@@ -218,6 +220,8 @@ func (p *processor) clean() {
 	p.badWellknownMetadata.reset()
 	p.badDNSPath.reset()
 	p.badDirListings.reset()
+	p.badROLIEfeed.reset()
+	p.labelChecker = nil
 }

 // run calls checkDomain function for each domain in the given "domains" parameter.
@@ -412,6 +416,129 @@ func (p *processor) httpClient() util.Client {
 	return p.client
 }

+// rolieFeedEntries loads the references to the advisory files for a given feed.
+func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) {
+	client := p.httpClient()
+	res, err := client.Get(feed)
+	p.badDirListings.use()
+	if err != nil {
+		p.badProviderMetadata.error("Cannot fetch feed %s: %v", feed, err)
+		return nil, errContinue
+	}
+	if res.StatusCode != http.StatusOK {
+		p.badProviderMetadata.warn("Fetching %s failed. Status code %d (%s)",
+			feed, res.StatusCode, res.Status)
+		return nil, errContinue
+	}
+
+	rfeed, rolieDoc, err := func() (*csaf.ROLIEFeed, any, error) {
+		defer res.Body.Close()
+		all, err := io.ReadAll(res.Body)
+		if err != nil {
+			return nil, nil, err
+		}
+		rfeed, err := csaf.LoadROLIEFeed(bytes.NewReader(all))
+		if err != nil {
+			return nil, nil, fmt.Errorf("%s: %v", feed, err)
+		}
+		var rolieDoc any
+		err = json.NewDecoder(bytes.NewReader(all)).Decode(&rolieDoc)
+		return rfeed, rolieDoc, err
+	}()
+	if err != nil {
+		p.badProviderMetadata.error("Loading ROLIE feed failed: %v.", err)
+		return nil, errContinue
+	}
+	errors, err := csaf.ValidateROLIE(rolieDoc)
+	if err != nil {
+		return nil, err
+	}
+	if len(errors) > 0 {
+		p.badProviderMetadata.error("%s: Validating against JSON schema failed:", feed)
+		for _, msg := range errors {
+			p.badProviderMetadata.error(strings.ReplaceAll(msg, `%`, `%%`))
+		}
+	}
+
+	// Extract the CSAF files from feed.
+	var files []csaf.AdvisoryFile
+
+	rfeed.Entries(func(entry *csaf.Entry) {
+
+		// Filter if we have date checking.
+		if p.ageAccept != nil {
+			if pub := time.Time(entry.Published); !pub.IsZero() && !p.ageAccept(pub) {
+				return
+			}
+		}
+
+		var url, sha256, sha512, sign string
+		for i := range entry.Link {
+			link := &entry.Link[i]
+			lower := strings.ToLower(link.HRef)
+			switch link.Rel {
+			case "self":
+				if !strings.HasSuffix(lower, ".json") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "self" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				url = link.HRef
+			case "signature":
+				if !strings.HasSuffix(lower, ".asc") {
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "signature" has unexpected file extension.`,
+						link.HRef, feed)
+				}
+				sign = link.HRef
+			case "hash":
+				switch {
+				case strings.HasSuffix(lower, "sha256"):
+					sha256 = link.HRef
+				case strings.HasSuffix(lower, "sha512"):
+					sha512 = link.HRef
+				default:
+					p.badProviderMetadata.warn(
+						`ROLIE feed entry link %s in %s with "rel": "hash" has unsupported file extension.`,
+						link.HRef, feed)
+				}
+			}
+		}
+
+		if url == "" {
+			p.badProviderMetadata.warn(
+				`ROLIE feed %s contains entry link with no "self" URL.`, feed)
+			return
+		}
+
+		var file csaf.AdvisoryFile
+
+		if sha256 != "" || sha512 != "" || sign != "" {
+			file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign}
+		} else {
+			file = csaf.PlainAdvisoryFile(url)
+		}
+
+		files = append(files, file)
+	})
+
+	return files, nil
+}
+
+// makeAbsolute returns a function that checks if a given
+// URL is absolute or not. If not it returns an
+// absolute URL based on a given base URL.
+func makeAbsolute(base *url.URL) func(*url.URL) *url.URL {
+	return func(u *url.URL) *url.URL {
+		if u.IsAbs() {
+			return u
+		}
+		return base.JoinPath(u.String())
+	}
+}
+
 var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`)

 func (p *processor) integrity(
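As a side note on the new makeAbsolute helper in this hunk: the following standalone sketch (not part of the commit; the URLs and the main function are made up for illustration) shows how a relative advisory reference from a ROLIE feed is resolved against the feed's base URL, while an already-absolute link passes through unchanged. The helper is re-declared verbatim so the snippet compiles on its own (Go 1.19+ for url.JoinPath).

package main

import (
	"fmt"
	"net/url"
)

// Same shape as the helper added in the hunk above: keep absolute URLs,
// join relative ones onto the base.
func makeAbsolute(base *url.URL) func(*url.URL) *url.URL {
	return func(u *url.URL) *url.URL {
		if u.IsAbs() {
			return u
		}
		return base.JoinPath(u.String())
	}
}

func main() {
	// Hypothetical feed base URL and entry links.
	base, _ := url.Parse("https://example.com/.well-known/csaf/white/")
	abs := makeAbsolute(base)

	rel, _ := url.Parse("2023/example-advisory.json")
	full, _ := url.Parse("https://cdn.example.com/other.json")

	fmt.Println(abs(rel))  // https://example.com/.well-known/csaf/white/2023/example-advisory.json
	fmt.Println(abs(full)) // https://cdn.example.com/other.json
}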
@@ -424,17 +551,11 @@ func (p *processor) integrity(
 	if err != nil {
 		return err
 	}
+	makeAbs := makeAbsolute(b)
 	client := p.httpClient()

 	var data bytes.Buffer

-	makeAbs := func(u *url.URL) *url.URL {
-		if u.IsAbs() {
-			return u
-		}
-		return b.JoinPath(u.String())
-	}
-
 	for _, f := range files {
 		fp, err := url.Parse(f.URL())
 		if err != nil {
@@ -456,7 +577,6 @@ func (p *processor) integrity(
 		}

 		var folderYear *int
-
 		if m := yearFromURL.FindStringSubmatch(u); m != nil {
 			year, _ := strconv.Atoi(m[1])
 			// Check if we are in checking time interval.
@@ -521,7 +641,6 @@ func (p *processor) integrity(
 			continue
-
 		}

 		// Validate against remote validator.
 		if p.validator != nil {
 			if rvr, err := p.validator.Validate(doc); err != nil {
@@ -531,6 +650,19 @@ func (p *processor) integrity(
 			}
 		}

+		// Extract the TLP level of the entry.
+		if tlpa, err := p.expr.Eval(
+			`$.document.distribution`, doc); err != nil {
+			p.badROLIEfeed.error(
+				"Extracting 'tlp level' from %s failed: %v", u, err)
+		} else {
+			tlpe := extractTLP(tlpa)
+			// Check if the current feed has correct entries for its TLP levels.
+			if p.labelChecker != nil {
+				p.labelChecker.check(p, tlpe, u)
+			}
+		}
+
 		// Check if file is in the right folder.
 		p.badFolders.use()
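To make the new TLP-extraction step concrete: the checker evaluates `$.document.distribution` on the decoded advisory via util.PathEval and hands the result to extractTLP (shown in a later hunk). Below is a standalone sketch, using only stdlib JSON decoding and a made-up advisory fragment, of what that evaluated value typically looks like.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Made-up advisory fragment; only the part the checker looks at.
	raw := []byte(`{"document": {"distribution": {"tlp": {"label": "AMBER"}}}}`)

	var doc any
	if err := json.Unmarshal(raw, &doc); err != nil {
		panic(err)
	}

	// Roughly what evaluating `$.document.distribution` on the decoded
	// document yields: the nested map that extractTLP then inspects.
	distribution := doc.(map[string]any)["document"].(map[string]any)["distribution"]
	fmt.Println(distribution) // map[tlp:map[label:AMBER]]
}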
@@ -567,6 +699,7 @@ func (p *processor) integrity(
 		}
 		hu = makeAbs(hu)
 		hashFile := b.ResolveReference(hu).String()
+
 		p.checkTLS(hashFile)
 		if res, err = client.Get(hashFile); err != nil {
 			p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err)
@@ -594,7 +727,6 @@ func (p *processor) integrity(
 					x.ext, u, hashFile)
 			}
 		}
-
 		// Check signature
 		su, err := url.Parse(f.SignURL())
 		if err != nil {
@@ -639,133 +771,23 @@ func (p *processor) integrity(
 			}
 		}
 	}

 	return nil
 }

-func (p *processor) processROLIEFeed(feed string) error {
-
-	client := p.httpClient()
-	res, err := client.Get(feed)
-	p.badDirListings.use()
-	if err != nil {
-		p.badProviderMetadata.error("Cannot fetch feed %s: %v", feed, err)
-		return errContinue
-	}
-	if res.StatusCode != http.StatusOK {
-		p.badProviderMetadata.warn("Fetching %s failed. Status code %d (%s)",
-			feed, res.StatusCode, res.Status)
-		return errContinue
-	}
-
-	rfeed, rolieDoc, err := func() (*csaf.ROLIEFeed, any, error) {
-		defer res.Body.Close()
-		all, err := io.ReadAll(res.Body)
-		if err != nil {
-			return nil, nil, err
-		}
-		rfeed, err := csaf.LoadROLIEFeed(bytes.NewReader(all))
-		if err != nil {
-			return nil, nil, fmt.Errorf("%s: %v", feed, err)
-		}
-		var rolieDoc any
-		err = json.NewDecoder(bytes.NewReader(all)).Decode(&rolieDoc)
-		return rfeed, rolieDoc, err
-	}()
-	if err != nil {
-		p.badProviderMetadata.error("Loading ROLIE feed failed: %v.", err)
-		return errContinue
-	}
-	errors, err := csaf.ValidateROLIE(rolieDoc)
-	if err != nil {
-		return err
-	}
-	if len(errors) > 0 {
-		p.badProviderMetadata.error("%s: Validating against JSON schema failed:", feed)
-		for _, msg := range errors {
-			p.badProviderMetadata.error(strings.ReplaceAll(msg, `%`, `%%`))
-		}
-	}
-
-	feedURL, err := url.Parse(feed)
-	if err != nil {
-		p.badProviderMetadata.error("Bad base path: %v", err)
-		return errContinue
-	}
-
-	base, err := util.BaseURL(feedURL)
-	if err != nil {
-		p.badProviderMetadata.error("Bad base path: %v", err)
-		return errContinue
-	}
-
-	// Extract the CSAF files from feed.
-	var files []csaf.AdvisoryFile
-
-	rfeed.Entries(func(entry *csaf.Entry) {
-
-		// Filter if we have date checking.
-		if p.ageAccept != nil {
-			if pub := time.Time(entry.Published); !pub.IsZero() && !p.ageAccept(pub) {
-				return
-			}
-		}
-
-		var url, sha256, sha512, sign string
-
-		for i := range entry.Link {
-			link := &entry.Link[i]
-			lower := strings.ToLower(link.HRef)
-			switch link.Rel {
-			case "self":
-				if !strings.HasSuffix(lower, ".json") {
-					p.badProviderMetadata.warn(
-						`ROLIE feed entry link %s in %s with "rel": "self" has unexpected file extension.`,
-						link.HRef, feed)
-				}
-				url = link.HRef
-			case "signature":
-				if !strings.HasSuffix(lower, ".asc") {
-					p.badProviderMetadata.warn(
-						`ROLIE feed entry link %s in %s with "rel": "signature" has unexpected file extension.`,
-						link.HRef, feed)
-				}
-				sign = link.HRef
-			case "hash":
-				switch {
-				case strings.HasSuffix(lower, "sha256"):
-					sha256 = link.HRef
-				case strings.HasSuffix(lower, "sha512"):
-					sha512 = link.HRef
-				default:
-					p.badProviderMetadata.warn(
-						`ROLIE feed entry link %s in %s with "rel": "hash" has unsupported file extension.`,
-						link.HRef, feed)
-				}
-			}
-		}
-
-		if url == "" {
-			p.badProviderMetadata.warn(
-				`ROLIE feed %s contains entry link with no "self" URL.`, feed)
-			return
-		}
-
-		var file csaf.AdvisoryFile
-
-		if sha256 != "" || sha512 != "" || sign != "" {
-			file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign}
-		} else {
-			file = csaf.PlainAdvisoryFile(url)
-		}
-
-		files = append(files, file)
-	})
-
-	if err := p.integrity(files, base, rolieMask, p.badProviderMetadata.add); err != nil &&
-		err != errContinue {
-		return err
-	}
-
-	return nil
-}
+// extractTLP tries to extract a valid TLP label from an advisory.
+// Returns "UNLABELED" if it does not exist, the label otherwise.
+func extractTLP(tlpa any) csaf.TLPLabel {
+	if distribution, ok := tlpa.(map[string]any); ok {
+		if tlp, ok := distribution["tlp"]; ok {
+			if label, ok := tlp.(map[string]any); ok {
+				if labelstring, ok := label["label"].(string); ok {
+					return csaf.TLPLabel(labelstring)
+				}
+			}
+		}
+	}
+	return csaf.TLPLabelUnlabeled
+}

 // checkIndex fetches the "index.txt" and calls "checkTLS" method for HTTPS checks.
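The fallback behavior of extractTLP is the interesting part of this hunk: any missing piece of document.distribution.tlp.label yields the unlabeled value rather than WHITE. A standalone sketch of that behavior, with a local stand-in function that returns plain strings instead of csaf.TLPLabel and made-up inputs:

package main

import "fmt"

// Local stand-in mirroring extractTLP from the hunk above, with plain
// strings instead of csaf.TLPLabel so the sketch runs on its own.
func extractTLPDemo(tlpa any) string {
	if distribution, ok := tlpa.(map[string]any); ok {
		if tlp, ok := distribution["tlp"]; ok {
			if label, ok := tlp.(map[string]any); ok {
				if labelstring, ok := label["label"].(string); ok {
					return labelstring
				}
			}
		}
	}
	return "UNLABELED"
}

func main() {
	for _, c := range []any{
		map[string]any{"tlp": map[string]any{"label": "GREEN"}}, // labeled
		map[string]any{"tlp": map[string]any{}},                 // tlp without label
		map[string]any{},                                        // no tlp at all
		nil,                                                     // not even a map
	} {
		fmt.Println(extractTLPDemo(c)) // GREEN, then UNLABELED three times
	}
}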
@@ -824,6 +846,9 @@ func (p *processor) checkIndex(base string, mask whereType) error {
 		p.badIntegrities.warn("index.txt contains no URLs")
 	}

+	// Block rolie checks.
+	p.labelChecker = nil
+
 	return p.integrity(files, base, mask, p.badIndices.add)
 }
@@ -916,36 +941,12 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 		p.badChanges.error("%s is not sorted in descending order", changes)
 	}

+	// Block rolie checks.
+	p.labelChecker = nil
+
 	return p.integrity(files, base, mask, p.badChanges.add)
 }

-func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error {
-
-	base, err := url.Parse(p.pmdURL)
-	if err != nil {
-		return err
-	}
-	for _, fs := range feeds {
-		for i := range fs {
-			feed := &fs[i]
-			if feed.URL == nil {
-				continue
-			}
-			up, err := url.Parse(string(*feed.URL))
-			if err != nil {
-				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
-				continue
-			}
-			feedURL := base.ResolveReference(up).String()
-			p.checkTLS(feedURL)
-			if err := p.processROLIEFeed(feedURL); err != nil && err != errContinue {
-				return err
-			}
-		}
-	}
-	return nil
-}
-
 // empty checks if list of strings contains at least one none empty string.
 func empty(arr []string) bool {
 	for _, s := range arr {
@@ -364,8 +364,17 @@ func (r *directoryListingsReporter) report(p *processor, domain *Domain) {
 // given TLP level and whether any of the TLP levels
 // TLP:WHITE, TLP:GREEN or unlabeled exists and sets the "message" field value
 // of the "Requirement" struct as a result of that.
-func (r *rolieFeedReporter) report(_ *processor, _ *Domain) {
-	// TODO
+func (r *rolieFeedReporter) report(p *processor, domain *Domain) {
+	req := r.requirement(domain)
+	if !p.badROLIEfeed.used() {
+		req.message(InfoType, "No checks on the validity of ROLIE feeds performed.")
+		return
+	}
+	if len(p.badROLIEfeed) == 0 {
+		req.message(InfoType, "All checked ROLIE feeds validated fine.")
+		return
+	}
+	req.Messages = p.badROLIEfeed
 }

 // report tests whether a ROLIE service document is used and if so,
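The reporter distinguishes three states of the badROLIEfeed collector: checks never ran, checks ran without findings, and checks ran with findings. A hypothetical, heavily simplified stand-in for that accumulator pattern (names and types invented for this sketch, not the project's actual topicMessages implementation):

package main

import "fmt"

// demoMessages mimics the three states the reporter distinguishes:
// nil slice = checks never ran, empty non-nil slice = ran and found
// nothing, non-empty = ran and collected findings.
type demoMessages []string

func (m *demoMessages) use() {
	if *m == nil {
		*m = demoMessages{}
	}
}

func (m demoMessages) used() bool { return m != nil }

func (m *demoMessages) warn(msg string) { *m = append(*m, "WARN: "+msg) }

func report(m demoMessages) string {
	switch {
	case !m.used():
		return "No checks on the validity of ROLIE feeds performed."
	case len(m) == 0:
		return "All checked ROLIE feeds validated fine."
	default:
		return fmt.Sprintf("%d finding(s): %v", len(m), []string(m))
	}
}

func main() {
	var notRun demoMessages

	ran := demoMessages{}
	ran.use()

	failed := demoMessages{}
	failed.use()
	failed.warn("advisory listed in a feed of the wrong TLP color")

	fmt.Println(report(notRun)) // checks never ran
	fmt.Println(report(ran))    // ran, nothing found
	fmt.Println(report(failed)) // ran, one finding
}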
cmd/csaf_checker/roliecheck.go (new file, +265 lines)

@@ -0,0 +1,265 @@
+// This file is Free Software under the MIT License
+// without warranty, see README.md and LICENSES/MIT.txt for details.
+//
+// SPDX-License-Identifier: MIT
+//
+// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
+// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
+
+package main
+
+import (
+	"net/url"
+
+	"github.com/csaf-poc/csaf_distribution/v2/csaf"
+	"github.com/csaf-poc/csaf_distribution/v2/util"
+)
+
+// rolieLabelChecker helps to check if advisories in ROLIE feeds
+// are in their right TLP color.
+type rolieLabelChecker struct {
+	feedURL   string
+	feedLabel csaf.TLPLabel
+
+	advisories map[csaf.TLPLabel]map[string]struct{}
+}
+
+// tlpLevel returns an inclusion order of TLP colors.
+func tlpLevel(label csaf.TLPLabel) int {
+	switch label {
+	case csaf.TLPLabelWhite:
+		return 1
+	case csaf.TLPLabelGreen:
+		return 2
+	case csaf.TLPLabelAmber:
+		return 3
+	case csaf.TLPLabelRed:
+		return 4
+	default:
+		return 0
+	}
+}
+
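How this inclusion order is read by the check method further down: an advisory ranked lower than its feed only produces a warning (it still counts as completing), a higher rank is an error, and rank 0 marks unlabeled advisories, which are reported as informational. A standalone sketch, not part of the commit, using a string-based stand-in for tlpLevel:

package main

import "fmt"

// Stand-in for tlpLevel above, using plain strings instead of
// csaf.TLPLabel so the sketch runs on its own.
func rank(label string) int {
	switch label {
	case "WHITE":
		return 1
	case "GREEN":
		return 2
	case "AMBER":
		return 3
	case "RED":
		return 4
	default: // anything else counts as unlabeled
		return 0
	}
}

func main() {
	feed := "GREEN"
	fmt.Println(rank("WHITE") < rank(feed)) // true: lower color in a GREEN feed -> warning only
	fmt.Println(rank("AMBER") > rank(feed)) // true: higher color in a GREEN feed -> error
	fmt.Println(rank("UNLABELED"))          // 0: reported as an informational message
}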
+// tlpLabel returns the value of a non-nil pointer
+// to a TLPLabel. If the pointer is nil, unlabeled is returned.
+func tlpLabel(label *csaf.TLPLabel) csaf.TLPLabel {
+	if label != nil {
+		return *label
+	}
+	return csaf.TLPLabelUnlabeled
+}
+
+// check tests if an advisory is in the right TLP color of the
+// currently tested feed.
+func (ca *rolieLabelChecker) check(
+	p *processor,
+	advisoryLabel csaf.TLPLabel,
+	advisory string,
+) {
+	// Assign ints to TLP levels for easy comparison.
+	var (
+		advisoryRank = tlpLevel(advisoryLabel)
+		feedRank     = tlpLevel(ca.feedLabel)
+	)
+
+	// Associate advisory label to URLs.
+	advs := ca.advisories[advisoryLabel]
+	if advs == nil {
+		advs = make(map[string]struct{})
+		ca.advisories[advisoryLabel] = advs
+	}
+	advs[advisory] = struct{}{}
+
+	// If the entry shows up in a feed of higher TLP level,
+	// give out an info or warning.
+	switch {
+	case advisoryRank < feedRank:
+		if advisoryRank == 0 { // All kinds of 'UNLABELED'
+			p.badROLIEfeed.info(
+				"Found unlabeled advisory %q in feed %q.",
+				advisory, ca.feedURL)
+		} else {
+			p.badROLIEfeed.warn(
+				"Found advisory %q labeled TLP:%s in feed %q (TLP:%s).",
+				advisory, advisoryLabel,
+				ca.feedURL, ca.feedLabel)
+		}
+
+	case advisoryRank > feedRank:
+		// Must not happen, give error.
+		p.badROLIEfeed.error(
+			"%s of TLP level %s must not be listed in feed %s of TLP level %s",
+			advisory, advisoryLabel, ca.feedURL, ca.feedLabel)
+	}
+}
+
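Besides flagging mismatches, check also files every advisory URL under its own label; that bookkeeping is what phase 3 of processROLIEFeeds below compares the feeds against. A minimal standalone sketch of that recording pattern (simplified types, made-up URLs, not part of the commit):

package main

import "fmt"

func main() {
	// label -> set of advisory URLs, as rolieLabelChecker.advisories does.
	advisories := map[string]map[string]struct{}{}

	record := func(label, url string) {
		set := advisories[label]
		if set == nil {
			set = map[string]struct{}{}
			advisories[label] = set
		}
		set[url] = struct{}{}
	}

	// The same advisory seen via two different feeds is recorded once.
	record("WHITE", "https://example.com/csaf/2023/adv-1.json")
	record("WHITE", "https://example.com/csaf/2023/adv-1.json")
	record("GREEN", "https://example.com/csaf/2023/adv-2.json")

	fmt.Println(len(advisories["WHITE"]), len(advisories["GREEN"])) // 1 1
}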
+// processROLIEFeeds goes through all ROLIE feeds and checks their
+// integrity and completeness.
+func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error {
+
+	base, err := url.Parse(p.pmdURL)
+	if err != nil {
+		return err
+	}
+	p.badROLIEfeed.use()
+
+	advisories := map[*csaf.Feed][]csaf.AdvisoryFile{}
+
+	// Phase 1: load all advisory URLs.
+	for _, fs := range feeds {
+		for i := range fs {
+			feed := &fs[i]
+			if feed.URL == nil {
+				continue
+			}
+			up, err := url.Parse(string(*feed.URL))
+			if err != nil {
+				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
+				continue
+			}
+			feedBase := base.ResolveReference(up)
+			feedURL := feedBase.String()
+			p.checkTLS(feedURL)
+
+			advs, err := p.rolieFeedEntries(feedURL)
+			if err != nil {
+				if err != errContinue {
+					return err
+				}
+				continue
+			}
+			advisories[feed] = advs
+		}
+	}
+
+	// Phase 2: check for integrity.
+	for _, fs := range feeds {
+		for i := range fs {
+			feed := &fs[i]
+			if feed.URL == nil {
+				continue
+			}
+			files := advisories[feed]
+			if files == nil {
+				continue
+			}
+
+			up, err := url.Parse(string(*feed.URL))
+			if err != nil {
+				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
+				continue
+			}
+
+			feedURL := base.ResolveReference(up)
+			feedBase, err := util.BaseURL(feedURL)
+			if err != nil {
+				p.badProviderMetadata.error("Bad base path: %v", err)
+				continue
+			}
+
+			label := tlpLabel(feed.TLPLabel)
+
+			p.labelChecker = &rolieLabelChecker{
+				feedURL:    feedURL.String(),
+				feedLabel:  label,
+				advisories: map[csaf.TLPLabel]map[string]struct{}{},
+			}
+
+			if err := p.integrity(files, feedBase, rolieMask, p.badProviderMetadata.add); err != nil {
+				if err != errContinue {
+					return err
+				}
+			}
+		}
+	}
+
+	// Phase 3: Check for completeness.
+
+	hasSummary := map[csaf.TLPLabel]struct{}{}
+
+	var (
+		hasUnlabeled = false
+		hasWhite     = false
+		hasGreen     = false
+	)
+
+	for _, fs := range feeds {
+		for i := range fs {
+			feed := &fs[i]
+			if feed.URL == nil {
+				continue
+			}
+			files := advisories[feed]
+			if files == nil {
+				continue
+			}
+
+			up, err := url.Parse(string(*feed.URL))
+			if err != nil {
+				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
+				continue
+			}
+
+			feedBase := base.ResolveReference(up)
+			makeAbs := makeAbsolute(feedBase)
+			label := tlpLabel(feed.TLPLabel)
+
+			switch label {
+			case csaf.TLPLabelUnlabeled:
+				hasUnlabeled = true
+			case csaf.TLPLabelWhite:
+				hasWhite = true
+			case csaf.TLPLabelGreen:
+				hasGreen = true
+			}
+
+			reference := p.labelChecker.advisories[label]
+			advisories := make(map[string]struct{}, len(reference))
+
+			for _, adv := range files {
+				u, err := url.Parse(adv.URL())
+				if err != nil {
+					p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
+					continue
+				}
+				advisories[makeAbs(u).String()] = struct{}{}
+			}
+			if containsAllKeys(reference, advisories) {
+				hasSummary[label] = struct{}{}
+			}
+		}
+	}
+
+	if !hasWhite && !hasGreen && !hasUnlabeled {
+		p.badROLIEfeed.error(
+			"One ROLIE feed with a TLP:WHITE, TLP:GREEN or unlabeled tlp must exist, " +
+				"but none were found.")
+	}
+
+	// Every TLP level with data should have at least one summary feed.
+	for _, label := range []csaf.TLPLabel{
+		csaf.TLPLabelUnlabeled,
+		csaf.TLPLabelWhite,
+		csaf.TLPLabelGreen,
+		csaf.TLPLabelAmber,
+		csaf.TLPLabelRed,
+	} {
+		if _, ok := hasSummary[label]; !ok && len(p.labelChecker.advisories[label]) > 0 {
+			p.badROLIEfeed.warn(
+				"ROLIE feed for TLP:%s has no accessible listed feed covering all advisories.",
+				label)
+		}
+	}
+
+	return nil
+}
+
+// containsAllKeys returns if m2 contains all keys of m1.
+func containsAllKeys[K comparable, V any](m1, m2 map[K]V) bool {
+	for k := range m1 {
+		if _, ok := m2[k]; !ok {
+			return false
+		}
+	}
+	return true
+}
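Finally, a standalone usage sketch of containsAllKeys as phase 3 applies it: a feed only counts as a summary feed for a label if it lists every advisory of that label seen anywhere. The helper is repeated so the snippet compiles on its own; the data is made up.

package main

import "fmt"

// Same generic helper as above, repeated so the sketch compiles on its own.
func containsAllKeys[K comparable, V any](m1, m2 map[K]V) bool {
	for k := range m1 {
		if _, ok := m2[k]; !ok {
			return false
		}
	}
	return true
}

func main() {
	reference := map[string]struct{}{ // every WHITE advisory seen in any feed
		"adv-1.json": {}, "adv-2.json": {},
	}
	feedA := map[string]struct{}{"adv-1.json": {}}                   // incomplete
	feedB := map[string]struct{}{"adv-1.json": {}, "adv-2.json": {}} // complete

	fmt.Println(containsAllKeys(reference, feedA)) // false
	fmt.Println(containsAllKeys(reference, feedB)) // true
}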