Mirror of https://github.com/gocsaf/csaf.git, synced 2025-12-22 18:15:42 +01:00

parent be3dfcd542
commit 8032d47b50

8 changed files with 40 additions and 40 deletions
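The commit replaces ad-hoc map[string]bool and map[string]struct{} sets with a generic util.Set[string]. The definition of util.Set is not part of the hunks shown below, but the call sites constrain it: it is created with a composite literal (util.Set[string]{}), compared against nil, passed to len(), and offers Add, Contains and ContainsAll. A minimal sketch consistent with those usages, with the method set and signatures inferred rather than copied from the repository, could be:

package util

// Set is a sketch of a generic set type; inferred from the call sites in
// this commit, the real util.Set may differ in representation and methods.
type Set[T comparable] map[T]struct{}

// Add inserts item into the set.
func (s Set[T]) Add(item T) { s[item] = struct{}{} }

// Contains reports whether item is in the set.
func (s Set[T]) Contains(item T) bool {
    _, ok := s[item]
    return ok
}

// ContainsAll reports whether every element of other is in s.
// The parameter type is a guess; the diff only shows the call
// advisories.ContainsAll(reference).
func (s Set[T]) ContainsAll(other Set[T]) bool {
    for item := range other {
        if !s.Contains(item) {
            return false
        }
    }
    return true
}

With a map as the underlying type, the zero value is nil: it can be read and ranged over, but Add panics on it, which is why several call sites below still allocate with util.Set[string]{} before writing.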
@@ -219,7 +219,7 @@ func (c *config) checkProviders() error {
 		return errors.New("need at least two providers")
 	}
 
-	already := make(map[string]bool)
+	already := util.Set[string]{}
 
 	for _, p := range c.Providers {
 		if p.Name == "" {
@@ -228,10 +228,10 @@ func (c *config) checkProviders() error {
 		if p.Domain == "" {
 			return errors.New("no domain given for provider")
 		}
-		if already[p.Name] {
+		if already.Contains(p.Name) {
 			return fmt.Errorf("provider '%s' is configured more than once", p.Name)
 		}
-		already[p.Name] = true
+		already.Add(p.Name)
 	}
 	return nil
 }

@@ -65,7 +65,7 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {
 	w.summaries = make(map[string][]summary)
 
 	// Collecting the categories per label.
-	w.categories = make(map[string]map[string]bool)
+	w.categories = map[string]util.Set[string]{}
 
 	base, err := url.Parse(w.loc)
 	if err != nil {
@@ -447,7 +447,7 @@ func (w *worker) extractCategories(label string, advisory any) error {
 
 	cats := w.categories[label]
 	if cats == nil {
-		cats = make(map[string]bool)
+		cats = util.Set[string]{}
 		w.categories[label] = cats
 	}
 
@@ -468,13 +468,13 @@ func (w *worker) extractCategories(label string, advisory any) error {
 			// Ignore errors here as they result from not matching.
 			w.expr.Extract(expr, matcher, true, advisory)
 		} else { // Normal
-			cats[cat] = true
+			cats.Add(cat)
 		}
 	}
 
 	// Add dynamic categories.
 	for _, cat := range dynamic {
-		cats[cat] = true
+		cats.Add(cat)
 	}
 
 	return nil

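extractCategories reads w.categories[label] and allocates only when the label has not been seen before; this works because a Set fetched from a map for a missing key is nil, which can be compared and read but not written. A standalone illustration, assuming the map-backed sketch above and the import path introduced later in this diff (label and category names are made up):

package main

import (
    "fmt"

    "github.com/csaf-poc/csaf_distribution/v2/util"
)

func main() {
    categories := map[string]util.Set[string]{}

    cats := categories["white"] // label not seen yet: cats is a nil Set
    fmt.Println(cats == nil)    // true
    // cats.Add("vex") at this point would panic for a plain map-backed Set:
    // "assignment to entry in nil map".

    if cats == nil { // hence the allocation in extractCategories
        cats = util.Set[string]{}
        categories["white"] = cats
    }
    cats.Add("vex")
    fmt.Println(cats.Contains("vex")) // true
}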
@@ -40,13 +40,13 @@ type worker struct {
 	expr     *util.PathEval
 	signRing *crypto.KeyRing
 
-	client           util.Client                // client per provider
-	provider         *provider                  // current provider
-	metadataProvider any                        // current metadata provider
-	loc              string                     // URL of current provider-metadata.json
-	dir              string                     // Directory to store data to.
-	summaries        map[string][]summary       // the summaries of the advisories.
-	categories       map[string]map[string]bool // the categories per label.
+	client           util.Client                 // client per provider
+	provider         *provider                   // current provider
+	metadataProvider any                         // current metadata provider
+	loc              string                      // URL of current provider-metadata.json
+	dir              string                      // Directory to store data to.
+	summaries        map[string][]summary        // the summaries of the advisories.
+	categories       map[string]util.Set[string] // the categories per label.
 }
 
 func newWorker(num int, processor *processor) *worker {
@@ -104,9 +104,9 @@ func (w *worker) locateProviderMetadata(domain string) error {
 // removeOrphans removes the directories that are not in the providers list.
 func (p *processor) removeOrphans() error {
 
-	keep := make(map[string]bool)
+	keep := util.Set[string]{}
 	for _, p := range p.cfg.Providers {
-		keep[p.Name] = true
+		keep.Add(p.Name)
 	}
 
 	path := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator")
@@ -134,7 +134,7 @@ func (p *processor) removeOrphans() error {
 	}
 
 	for _, entry := range entries {
-		if keep[entry.Name()] {
+		if keep.Contains(entry.Name()) {
 			continue
 		}
 

@@ -20,7 +20,7 @@ import (
 type (
 	pageContent struct {
 		err   error
-		links map[string]struct{}
+		links util.Set[string]
 	}
 	pages map[string]*pageContent
 )
@@ -28,7 +28,7 @@ type (
 func (pgs pages) listed(
 	path string,
 	pro *processor,
-	badDirs map[string]struct{},
+	badDirs util.Set[string],
 ) (bool, error) {
 	pathURL, err := url.Parse(path)
 	if err != nil {
@@ -45,8 +45,7 @@ func (pgs pages) listed(
 		if content.err != nil {
 			return false, nil
 		}
-		_, ok := content.links[path]
-		return ok, nil
+		return content.links.Contains(path), nil
 	}
 
 	baseURL, err := url.Parse(base)
@@ -54,7 +53,7 @@ func (pgs pages) listed(
 		return false, err
 	}
 
-	if _, ok := badDirs[base]; ok {
+	if badDirs.Contains(base) {
 		return false, errContinue
 	}
 
@@ -67,18 +66,18 @@ func (pgs pages) listed(
 
 	if err != nil {
 		pro.badDirListings.error("Fetching %s failed: %v", base, err)
-		badDirs[base] = struct{}{}
+		badDirs.Add(base)
 		return false, errContinue
 	}
 	if res.StatusCode != http.StatusOK {
 		pro.badDirListings.error("Fetching %s failed. Status code %d (%s)",
 			base, res.StatusCode, res.Status)
-		badDirs[base] = struct{}{}
+		badDirs.Add(base)
 		return false, errContinue
 	}
 
 	content = &pageContent{
-		links: map[string]struct{}{},
+		links: util.Set[string]{},
 	}
 
 	pgs[base] = content
@@ -94,15 +93,14 @@ func (pgs pages) listed(
 			}
 			// Links may be relative
 			abs := baseURL.ResolveReference(u).String()
-			content.links[abs] = struct{}{}
+			content.links.Add(abs)
 			return nil
 		})
 	}(); err != nil {
 		return false, errContinue
 	}
 
-	_, ok := content.links[path]
-	return ok, nil
+	return content.links.Contains(path), nil
 }
 
 func linksOnPage(r io.Reader, visit func(string) error) error {

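The pages.listed changes are purely mechanical: comma-ok lookups on the raw map become Contains calls and struct{}{} assignments become Add. Under the map-backed sketch above the two spellings behave identically, e.g. (URL is invented):

package main

import (
    "fmt"

    "github.com/csaf-poc/csaf_distribution/v2/util"
)

func main() {
    links := util.Set[string]{}
    links.Add("https://example.com/a") // new style, replaces links[...] = struct{}{}

    _, ok := links["https://example.com/a"] // old style comma-ok, still valid if Set is a map type
    fmt.Println(ok)                                       // true
    fmt.Println(links.Contains("https://example.com/a")) // true
}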
@@ -46,7 +46,7 @@ type processor struct {
 	ageAccept func(time.Time) bool
 
 	redirects      map[string][]string
-	noneTLS        map[string]struct{}
+	noneTLS        util.Set[string]
 	alreadyChecked map[string]whereType
 	pmdURL         string
 	pmd256         []byte
@@ -369,10 +369,10 @@ func (p *processor) checkDomain(domain string) error {
 // the value of "noneTLS" field if it is not HTTPS.
 func (p *processor) checkTLS(u string) {
 	if p.noneTLS == nil {
-		p.noneTLS = map[string]struct{}{}
+		p.noneTLS = util.Set[string]{}
 	}
 	if x, err := url.Parse(u); err == nil && x.Scheme != "https" {
-		p.noneTLS[u] = struct{}{}
+		p.noneTLS.Add(u)
 	}
 }
 
@@ -1163,7 +1163,7 @@ func (p *processor) checkListing(string) error {
 
 	var unlisted []string
 
-	badDirs := map[string]struct{}{}
+	badDirs := util.Set[string]{}
 
 	if len(p.alreadyChecked) == 0 {
 		p.badDirListings.info("No directory listings found.")

@@ -12,6 +12,8 @@ import (
 	"fmt"
 	"sort"
 	"strings"
+
+	"github.com/csaf-poc/csaf_distribution/v2/util"
 )
 
 type (
@@ -211,15 +213,15 @@ func (r *redirectsReporter) report(p *processor, domain *Domain) {
 // keysNotInValues returns a slice of keys which are not in the values
 // of the given map.
 func keysNotInValues(m map[string][]string) []string {
-	values := map[string]bool{}
+	values := util.Set[string]{}
 	for _, vs := range m {
 		for _, v := range vs {
-			values[v] = true
+			values.Add(v)
 		}
 	}
 	keys := make([]string, 0, len(m))
 	for k := range m {
-		if !values[k] {
+		if !values.Contains(k) {
 			keys = append(keys, k)
 		}
 	}

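keysNotInValues keeps its semantics; only the membership bookkeeping changed. A small, self-contained usage sketch (the helper is unexported in the checker package, so it is reproduced here from the hunk above plus the obvious final return; the redirect URLs are invented):

package main

import (
    "fmt"
    "sort"

    "github.com/csaf-poc/csaf_distribution/v2/util"
)

// keysNotInValues mirrors the helper shown in the diff above.
func keysNotInValues(m map[string][]string) []string {
    values := util.Set[string]{}
    for _, vs := range m {
        for _, v := range vs {
            values.Add(v)
        }
    }
    keys := make([]string, 0, len(m))
    for k := range m {
        if !values.Contains(k) {
            keys = append(keys, k)
        }
    }
    return keys
}

func main() {
    // Hypothetical redirect chain: a redirects to b, b redirects to c.
    redirects := map[string][]string{
        "https://a.example/provider-metadata.json": {"https://b.example/provider-metadata.json"},
        "https://b.example/provider-metadata.json": {"https://c.example/provider-metadata.json"},
    }
    starts := keysNotInValues(redirects)
    sort.Strings(starts)
    // Only a.example never appears as a redirect target.
    fmt.Println(starts) // [https://a.example/provider-metadata.json]
}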
@@ -284,7 +284,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error {
 					"Invalid URL %s in feed: %v.", *feed.URL, err)
 				continue
 			}
-			advisories[makeAbs(u).String()] = struct{}{}
+			advisories.Add(makeAbs(u).String())
 		}
 		if advisories.ContainsAll(reference) {
 			hasSummary.Add(label)

@@ -90,18 +90,18 @@ func NewROLIECategoryDocument(categories ...string) *ROLIECategoryDocument {
 // Merge merges the given categories into the existing ones.
 // The results indicates if there were changes.
 func (rcd *ROLIECategoryDocument) Merge(categories ...string) bool {
-	index := make(map[string]bool)
+	index := util.Set[string]{}
 	for i := range rcd.Categories.Category {
-		index[rcd.Categories.Category[i].Term] = true
+		index.Add(rcd.Categories.Category[i].Term)
 	}
 
 	oldLen := len(index)
 
 	for _, cat := range categories {
-		if index[cat] {
+		if index.Contains(cat) {
 			continue
 		}
-		index[cat] = true
+		index.Add(cat)
 		rcd.Categories.Category = append(
 			rcd.Categories.Category, ROLIECategory{Term: cat})
 	}
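Merge deduplicates through the set and, per its doc comment, reports whether anything changed. A hypothetical usage, assuming the csaf package lives next to the util package imported above and that the return value follows from the oldLen bookkeeping (the category names are made up):

package main

import (
    "fmt"

    "github.com/csaf-poc/csaf_distribution/v2/csaf"
)

func main() {
    doc := csaf.NewROLIECategoryDocument("vendor", "product")

    // One category is already present, one is new: only "hardware" gets appended.
    fmt.Println(doc.Merge("vendor", "hardware")) // true, the document changed

    // Merging only known categories leaves the document untouched.
    fmt.Println(doc.Merge("product")) // false
}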