
check each advisory is listable

This commit is contained in:
Sascha L. Teichmann 2022-05-17 17:20:43 +02:00
parent 131a7155fa
commit 16556cd8bb
2 changed files with 77 additions and 85 deletions

View file

@@ -12,41 +12,64 @@ import (
 	"io"
 	"net/http"
 	"net/url"
-	"regexp"
-	"strings"
 
 	"github.com/PuerkitoBio/goquery"
+
+	"github.com/csaf-poc/csaf_distribution/util"
 )
 
-var yearFolder = regexp.MustCompile(`.*/?\d{4}/?$`)
+type (
+	pageContent struct {
+		err   error
+		links map[string]struct{}
+	}
+	pages map[string]*pageContent
+)
 
-func (p *processor) linksOnPageURL(baseDir string) ([]string, error) {
-
-	base, err := url.Parse(baseDir)
+func (pgs pages) listed(path string, pro *processor) (bool, error) {
+	base, err := util.BaseURL(path)
 	if err != nil {
-		return nil, err
+		return false, err
 	}
+	content := pgs[base]
+	if content != nil { // already loaded
+		if content.err != nil {
+			return false, nil
+		}
+		_, ok := content.links[path]
+		return ok, nil
+	}
 
-	client := p.httpClient()
-	p.checkTLS(baseDir)
-	res, err := client.Get(baseDir)
+	baseURL, err := url.Parse(base)
+	if err != nil {
+		return false, err
+	}
+
+	// load page
+	client := pro.httpClient()
+	pro.checkTLS(base)
+	res, err := client.Get(base)
 
-	p.badDirListings.use()
+	pro.badDirListings.use()
 
 	if err != nil {
-		p.badDirListings.add("Fetching %s failed: %v", base, err)
-		return nil, errContinue
+		pro.badDirListings.add("Fetching %s failed: %v", base, err)
+		return false, errContinue
 	}
 	if res.StatusCode != http.StatusOK {
-		p.badDirListings.add("Fetching %s failed. Status code %d (%s)",
+		pro.badDirListings.add("Fetching %s failed. Status code %d (%s)",
 			base, res.StatusCode, res.Status)
-		return nil, errContinue
+		return false, errContinue
 	}
 
-	var (
-		subDirs []string
-		files   []string
-	)
+	content = &pageContent{
+		links: map[string]struct{}{},
+	}
+
+	pgs[base] = content
 
+	// Build link index for this page.
 	if err := func() error {
 		defer res.Body.Close()
 		return linksOnPage(res.Body, func(link string) error {
@@ -55,58 +78,16 @@ func (p *processor) linksOnPageURL(baseDir string) ([]string, error) {
 				return err
 			}
 			// Links may be relative
-			abs := base.ResolveReference(u).String()
-			switch {
-			case yearFolder.MatchString(link):
-				subDirs = append(subDirs, abs)
-			case strings.HasSuffix(link, ".json"):
-				files = append(files, abs)
-			}
+			abs := baseURL.ResolveReference(u).String()
+			content.links[abs] = struct{}{}
 			return nil
 		})
 	}(); err != nil {
-		return nil, err
+		return false, errContinue
 	}
 
-	// If we do not have sub folders, return links from this level.
-	if len(subDirs) == 0 {
-		return files, nil
-	}
-
-	// Descent into folders
-	for _, sub := range subDirs {
-		p.checkTLS(sub)
-		res, err := client.Get(sub)
-		if err != nil {
-			p.badDirListings.add("Fetching %s failed: %v", sub, err)
-			return nil, errContinue
-		}
-		if res.StatusCode != http.StatusOK {
-			p.badDirListings.add("Fetching %s failed. Status code %d (%s)",
-				base, res.StatusCode, res.Status)
-			return nil, errContinue
-		}
-		if err := func() error {
-			defer res.Body.Close()
-			return linksOnPage(res.Body, func(link string) error {
-				u, err := url.Parse(link)
-				if err != nil {
-					return err
-				}
-				// Links may be relative
-				abs := base.ResolveReference(u).String()
-				// Only collect json files in this sub folder
-				if strings.HasSuffix(link, ".json") {
-					files = append(files, abs)
-				}
-				return nil
-			})
-		}(); err != nil {
-			return nil, err
-		}
-	}
-
-	return files, nil
+	_, ok := content.links[path]
+	return ok, nil
 }
 
 func linksOnPage(r io.Reader, visit func(string) error) error {
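
The body of linksOnPage is unchanged by this commit; only its signature appears above as context. For orientation, the following is a minimal sketch of what a helper with that signature could look like using the imported goquery package. The selector, error handling, and file layout are assumptions for illustration, not the repository's actual implementation.

package main

import (
	"io"

	"github.com/PuerkitoBio/goquery"
)

// linksOnPage is a hypothetical sketch: parse an HTML directory listing
// and hand every anchor's href to the visit callback.
func linksOnPage(r io.Reader, visit func(string) error) error {
	doc, err := goquery.NewDocumentFromReader(r)
	if err != nil {
		return err
	}
	var visitErr error
	doc.Find("a").EachWithBreak(func(_ int, s *goquery.Selection) bool {
		if href, ok := s.Attr("href"); ok {
			if err := visit(href); err != nil {
				visitErr = err
				return false // stop at the first callback error
			}
		}
		return true
	})
	return visitErr
}

The callback style leaves it to the caller what to do with each link, which is how the new listed method builds its per-directory link index in the hunk above.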

View file

@@ -204,6 +204,7 @@ func (p *processor) checkDomain(domain string) error {
 		(*processor).checkSecurity,
 		(*processor).checkCSAFs,
 		(*processor).checkMissing,
+		(*processor).checkListing,
 		(*processor).checkWellknownMetadataReporter,
 		(*processor).checkDNSPathReporter,
 	} {
@@ -517,10 +518,6 @@ func (p *processor) processROLIEFeed(feed string) error {
 		return err
 	}
 
-	if err := p.checkListing(base, rolieListingMask); err != nil && err != errContinue {
-		return err
-	}
-
 	return nil
 }
@@ -690,21 +687,6 @@ func (p *processor) checkCSAFs(domain string) error {
 		return err
 	}
 
-	if err := p.checkListing(base, listingMask); err != nil && err != errContinue {
-		return err
-	}
-	return nil
-}
-
-func (p *processor) checkListing(base string, mask whereType) error {
-	links, err := p.linksOnPageURL(base)
-	if err != nil {
-		return err
-	}
-	for _, link := range links {
-		p.markChecked(link, mask)
-	}
 	return nil
 }
@@ -742,6 +724,35 @@ func (p *processor) checkMissing(string) error {
 	return nil
 }
 
+// checkListing goes over all found advisory URLs and checks
+// whether their parent directory is listable.
+func (p *processor) checkListing(string) error {
+	p.badDirListings.use()
+
+	pgs := pages{}
+
+	var unlisted []string
+
+	for f := range p.alreadyChecked {
+		found, err := pgs.listed(f, p)
+		if err != nil && err != errContinue {
+			return err
+		}
+		if !found {
+			unlisted = append(unlisted, f)
+		}
+	}
+
+	if len(unlisted) > 0 {
+		sort.Strings(unlisted)
+		p.badDirListings.add("Not listed advisories: %s",
+			strings.Join(unlisted, ", "))
+	}
+
+	return nil
+}
+
 var providerMetadataLocations = [...]string{
 	".well-known/csaf",
 	"security/data/csaf",