1
0
Fork 0
mirror of https://github.com/gocsaf/csaf.git synced 2025-12-22 11:55:40 +01:00

Use Set type (#388)

* Use util.Set type.

* Caught another set usage.
This commit is contained in:
Sascha L. Teichmann 2023-07-04 13:00:01 +02:00 committed by GitHub
parent be3dfcd542
commit 8032d47b50
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 40 additions and 40 deletions

View file

@@ -219,7 +219,7 @@ func (c *config) checkProviders() error {
return errors.New("need at least two providers") return errors.New("need at least two providers")
} }
already := make(map[string]bool) already := util.Set[string]{}
for _, p := range c.Providers { for _, p := range c.Providers {
if p.Name == "" { if p.Name == "" {
@@ -228,10 +228,10 @@ func (c *config) checkProviders() error {
if p.Domain == "" { if p.Domain == "" {
return errors.New("no domain given for provider") return errors.New("no domain given for provider")
} }
if already[p.Name] { if already.Contains(p.Name) {
return fmt.Errorf("provider '%s' is configured more than once", p.Name) return fmt.Errorf("provider '%s' is configured more than once", p.Name)
} }
already[p.Name] = true already.Add(p.Name)
} }
return nil return nil
} }

View file

@@ -65,7 +65,7 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {
w.summaries = make(map[string][]summary) w.summaries = make(map[string][]summary)
// Collecting the categories per label. // Collecting the categories per label.
w.categories = make(map[string]map[string]bool) w.categories = map[string]util.Set[string]{}
base, err := url.Parse(w.loc) base, err := url.Parse(w.loc)
if err != nil { if err != nil {
@@ -447,7 +447,7 @@ func (w *worker) extractCategories(label string, advisory any) error {
cats := w.categories[label] cats := w.categories[label]
if cats == nil { if cats == nil {
cats = make(map[string]bool) cats = util.Set[string]{}
w.categories[label] = cats w.categories[label] = cats
} }
@@ -468,13 +468,13 @@ func (w *worker) extractCategories(label string, advisory any) error {
// Ignore errors here as they result from not matching. // Ignore errors here as they result from not matching.
w.expr.Extract(expr, matcher, true, advisory) w.expr.Extract(expr, matcher, true, advisory)
} else { // Normal } else { // Normal
cats[cat] = true cats.Add(cat)
} }
} }
// Add dynamic categories. // Add dynamic categories.
for _, cat := range dynamic { for _, cat := range dynamic {
cats[cat] = true cats.Add(cat)
} }
return nil return nil

View file

@@ -40,13 +40,13 @@ type worker struct {
expr *util.PathEval expr *util.PathEval
signRing *crypto.KeyRing signRing *crypto.KeyRing
client util.Client // client per provider client util.Client // client per provider
provider *provider // current provider provider *provider // current provider
metadataProvider any // current metadata provider metadataProvider any // current metadata provider
loc string // URL of current provider-metadata.json loc string // URL of current provider-metadata.json
dir string // Directory to store data to. dir string // Directory to store data to.
summaries map[string][]summary // the summaries of the advisories. summaries map[string][]summary // the summaries of the advisories.
categories map[string]map[string]bool // the categories per label. categories map[string]util.Set[string] // the categories per label.
} }
func newWorker(num int, processor *processor) *worker { func newWorker(num int, processor *processor) *worker {
@@ -104,9 +104,9 @@ func (w *worker) locateProviderMetadata(domain string) error {
// removeOrphans removes the directories that are not in the providers list. // removeOrphans removes the directories that are not in the providers list.
func (p *processor) removeOrphans() error { func (p *processor) removeOrphans() error {
keep := make(map[string]bool) keep := util.Set[string]{}
for _, p := range p.cfg.Providers { for _, p := range p.cfg.Providers {
keep[p.Name] = true keep.Add(p.Name)
} }
path := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator") path := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator")
@@ -134,7 +134,7 @@ func (p *processor) removeOrphans() error {
} }
for _, entry := range entries { for _, entry := range entries {
if keep[entry.Name()] { if keep.Contains(entry.Name()) {
continue continue
} }

View file

@@ -20,7 +20,7 @@ import (
type ( type (
pageContent struct { pageContent struct {
err error err error
links map[string]struct{} links util.Set[string]
} }
pages map[string]*pageContent pages map[string]*pageContent
) )
@@ -28,7 +28,7 @@ type (
func (pgs pages) listed( func (pgs pages) listed(
path string, path string,
pro *processor, pro *processor,
badDirs map[string]struct{}, badDirs util.Set[string],
) (bool, error) { ) (bool, error) {
pathURL, err := url.Parse(path) pathURL, err := url.Parse(path)
if err != nil { if err != nil {
@@ -45,8 +45,7 @@ func (pgs pages) listed(
if content.err != nil { if content.err != nil {
return false, nil return false, nil
} }
_, ok := content.links[path] return content.links.Contains(path), nil
return ok, nil
} }
baseURL, err := url.Parse(base) baseURL, err := url.Parse(base)
@@ -54,7 +53,7 @@ func (pgs pages) listed(
return false, err return false, err
} }
if _, ok := badDirs[base]; ok { if badDirs.Contains(base) {
return false, errContinue return false, errContinue
} }
@@ -67,18 +66,18 @@ func (pgs pages) listed(
if err != nil { if err != nil {
pro.badDirListings.error("Fetching %s failed: %v", base, err) pro.badDirListings.error("Fetching %s failed: %v", base, err)
badDirs[base] = struct{}{} badDirs.Add(base)
return false, errContinue return false, errContinue
} }
if res.StatusCode != http.StatusOK { if res.StatusCode != http.StatusOK {
pro.badDirListings.error("Fetching %s failed. Status code %d (%s)", pro.badDirListings.error("Fetching %s failed. Status code %d (%s)",
base, res.StatusCode, res.Status) base, res.StatusCode, res.Status)
badDirs[base] = struct{}{} badDirs.Add(base)
return false, errContinue return false, errContinue
} }
content = &pageContent{ content = &pageContent{
links: map[string]struct{}{}, links: util.Set[string]{},
} }
pgs[base] = content pgs[base] = content
@@ -94,15 +93,14 @@ func (pgs pages) listed(
} }
// Links may be relative // Links may be relative
abs := baseURL.ResolveReference(u).String() abs := baseURL.ResolveReference(u).String()
content.links[abs] = struct{}{} content.links.Add(abs)
return nil return nil
}) })
}(); err != nil { }(); err != nil {
return false, errContinue return false, errContinue
} }
_, ok := content.links[path] return content.links.Contains(path), nil
return ok, nil
} }
func linksOnPage(r io.Reader, visit func(string) error) error { func linksOnPage(r io.Reader, visit func(string) error) error {

View file

@@ -46,7 +46,7 @@ type processor struct {
ageAccept func(time.Time) bool ageAccept func(time.Time) bool
redirects map[string][]string redirects map[string][]string
noneTLS map[string]struct{} noneTLS util.Set[string]
alreadyChecked map[string]whereType alreadyChecked map[string]whereType
pmdURL string pmdURL string
pmd256 []byte pmd256 []byte
@@ -369,10 +369,10 @@ func (p *processor) checkDomain(domain string) error {
// the value of "noneTLS" field if it is not HTTPS. // the value of "noneTLS" field if it is not HTTPS.
func (p *processor) checkTLS(u string) { func (p *processor) checkTLS(u string) {
if p.noneTLS == nil { if p.noneTLS == nil {
p.noneTLS = map[string]struct{}{} p.noneTLS = util.Set[string]{}
} }
if x, err := url.Parse(u); err == nil && x.Scheme != "https" { if x, err := url.Parse(u); err == nil && x.Scheme != "https" {
p.noneTLS[u] = struct{}{} p.noneTLS.Add(u)
} }
} }
@@ -1163,7 +1163,7 @@ func (p *processor) checkListing(string) error {
var unlisted []string var unlisted []string
badDirs := map[string]struct{}{} badDirs := util.Set[string]{}
if len(p.alreadyChecked) == 0 { if len(p.alreadyChecked) == 0 {
p.badDirListings.info("No directory listings found.") p.badDirListings.info("No directory listings found.")

View file

@@ -12,6 +12,8 @@ import (
"fmt" "fmt"
"sort" "sort"
"strings" "strings"
"github.com/csaf-poc/csaf_distribution/v2/util"
) )
type ( type (
@@ -211,15 +213,15 @@ func (r *redirectsReporter) report(p *processor, domain *Domain) {
// keysNotInValues returns a slice of keys which are not in the values // keysNotInValues returns a slice of keys which are not in the values
// of the given map. // of the given map.
func keysNotInValues(m map[string][]string) []string { func keysNotInValues(m map[string][]string) []string {
values := map[string]bool{} values := util.Set[string]{}
for _, vs := range m { for _, vs := range m {
for _, v := range vs { for _, v := range vs {
values[v] = true values.Add(v)
} }
} }
keys := make([]string, 0, len(m)) keys := make([]string, 0, len(m))
for k := range m { for k := range m {
if !values[k] { if !values.Contains(k) {
keys = append(keys, k) keys = append(keys, k)
} }
} }

View file

@@ -284,7 +284,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error {
"Invalid URL %s in feed: %v.", *feed.URL, err) "Invalid URL %s in feed: %v.", *feed.URL, err)
continue continue
} }
advisories[makeAbs(u).String()] = struct{}{} advisories.Add(makeAbs(u).String())
} }
if advisories.ContainsAll(reference) { if advisories.ContainsAll(reference) {
hasSummary.Add(label) hasSummary.Add(label)

View file

@@ -90,18 +90,18 @@ func NewROLIECategoryDocument(categories ...string) *ROLIECategoryDocument {
// Merge merges the given categories into the existing ones. // Merge merges the given categories into the existing ones.
// The results indicates if there were changes. // The results indicates if there were changes.
func (rcd *ROLIECategoryDocument) Merge(categories ...string) bool { func (rcd *ROLIECategoryDocument) Merge(categories ...string) bool {
index := make(map[string]bool) index := util.Set[string]{}
for i := range rcd.Categories.Category { for i := range rcd.Categories.Category {
index[rcd.Categories.Category[i].Term] = true index.Add(rcd.Categories.Category[i].Term)
} }
oldLen := len(index) oldLen := len(index)
for _, cat := range categories { for _, cat := range categories {
if index[cat] { if index.Contains(cat) {
continue continue
} }
index[cat] = true index.Add(cat)
rcd.Categories.Category = append( rcd.Categories.Category = append(
rcd.Categories.Category, ROLIECategory{Term: cat}) rcd.Categories.Category, ROLIECategory{Term: cat})
} }