From 9a1c66eb8ead1a7075c3ee00bbef3bb97a469883 Mon Sep 17 00:00:00 2001
From: "Sascha L. Teichmann"
Date: Mon, 15 Jan 2024 08:59:58 +0100
Subject: [PATCH 001/176] checker: Ensure that the processor is reset before checking each domain. (#523)

---
 cmd/csaf_checker/processor.go | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go
index 7eaefef..8f3a6c1 100644
--- a/cmd/csaf_checker/processor.go
+++ b/cmd/csaf_checker/processor.go
@@ -202,8 +202,8 @@ func (p *processor) close() {
 	}
 }
 
-// clean clears the fields values of the given processor.
-func (p *processor) clean() {
+// reset clears the fields values of the given processor.
+func (p *processor) reset() {
 	p.redirects = nil
 	p.noneTLS = nil
 	for k := range p.alreadyChecked {
@@ -247,6 +247,8 @@ func (p *processor) run(domains []string) (*Report, error) {
 	}
 
 	for _, d := range domains {
+		p.reset()
+
 		if !p.checkProviderMetadata(d) {
 			// We cannot build a report if the provider metadata cannot be parsed.
 			log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d)
@@ -287,7 +289,6 @@ func (p *processor) run(domains []string) (*Report, error) {
 
 		domain.Passed = rules.eval(p)
 		report.Domains = append(report.Domains, domain)
-		p.clean()
 	}
 
 	return &report, nil

From b858640fc173be3b4373694b036c83bd5fcc26a8 Mon Sep 17 00:00:00 2001
From: "Bernhard E. Reiter"
Date: Fri, 23 Feb 2024 14:48:39 +0100
Subject: [PATCH 002/176] docs: fix minor typo in test-keys/Readme.md (#525)

---
 docs/test-keys/Readme.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/test-keys/Readme.md b/docs/test-keys/Readme.md
index 5b422fd..94c8d8f 100644
--- a/docs/test-keys/Readme.md
+++ b/docs/test-keys/Readme.md
@@ -1,6 +1,6 @@
 OpenPGP key-pairs for testing only.
 
-Note: as the keypairs wre fully public, **do not use them for production**.
+Note: as the keypairs are fully public, **do not use them for production**.
 
 Create your own keypair(s) with the security properties
 and operational security you need.

From 51a681ef3101506ec402e826064bc28f00a94250 Mon Sep 17 00:00:00 2001
From: Bernhard Reiter
Date: Tue, 27 Feb 2024 09:44:41 +0100
Subject: [PATCH 003/176] docs: improve link to CSAF standard documents

* Add overview link to csaf.io
* Fix link to specification and add link to the latest errata document.
---
 README.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 54daf87..69601cd 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,8 @@
 # csaf_distribution
 
-An implementation of a
-[CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)
+An implementation of a [CSAF](https://csaf.io/)
+[2.0 Spec](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html)
+([Errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html))
 trusted provider, checker, aggregator and downloader. Includes an uploader
 command line tool for the trusted provider.
 

From e658738b568ba6c6173325ce4b1081c8142b081c Mon Sep 17 00:00:00 2001
From: Christian Banse
Date: Thu, 18 Apr 2024 19:51:25 +0200
Subject: [PATCH 004/176] Added support for structured logging in `csaf_aggregator`

This PR adds structured logging for the aggregator service. Currently, only
the text handler is used, but I can extend this to use the JSON handler as
well. In this case, probably some code that is shared between the aggregator
and the downloader would need to be moved to a common package.
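As a side note, a minimal sketch (not part of this patch) of how the handler choice could later be made switchable between text and JSON output; the `format` parameter and its `"json"` value are illustrative assumptions, not existing configuration options:

```go
// Sketch only, based on the x/exp/slog API already used in this patch.
// The format parameter is hypothetical; the patch itself always installs the text handler.
package main

import (
	"os"

	"golang.org/x/exp/slog"
)

func prepareLogging(format string) {
	opts := slog.HandlerOptions{Level: slog.LevelDebug}
	var handler slog.Handler
	if format == "json" {
		handler = slog.NewJSONHandler(os.Stdout, &opts)
	} else {
		handler = slog.NewTextHandler(os.Stdout, &opts)
	}
	slog.SetDefault(slog.New(handler))
}

func main() {
	prepareLogging("json")
	slog.Info("structured logging ready", "handler", "json")
}
```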
I was also wondering, whether this repo is moving to Go 1.21 at the future, since `slog` was introduced in to the standard lib in 1.21. So currently, this still relies on the `x/exp` package. Fixes #462 --- cmd/csaf_aggregator/config.go | 21 ++++++++++--- cmd/csaf_aggregator/full.go | 38 ++++++++++++++++------- cmd/csaf_aggregator/indices.go | 3 +- cmd/csaf_aggregator/interim.go | 10 +++--- cmd/csaf_aggregator/lazytransaction.go | 5 +-- cmd/csaf_aggregator/main.go | 11 ++++--- cmd/csaf_aggregator/mirror.go | 43 ++++++++++++-------------- cmd/csaf_aggregator/processor.go | 32 +++++++++++-------- csaf/advisories.go | 12 +++++++ go.mod | 2 +- go.sum | 2 ++ internal/options/options.go | 10 ++++++ 12 files changed, 122 insertions(+), 67 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index edb1fd9..2a2bef2 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -12,7 +12,6 @@ import ( "crypto/tls" "errors" "fmt" - "log" "net/http" "os" "runtime" @@ -26,6 +25,7 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/internal/models" "github.com/csaf-poc/csaf_distribution/v3/internal/options" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" "golang.org/x/time/rate" ) @@ -178,9 +178,11 @@ func (p *provider) ageAccept(c *config) func(time.Time) bool { } if c.Verbose { - log.Printf( - "Setting up filter to accept advisories within time range %s to %s\n", - r[0].Format(time.RFC3339), r[1].Format(time.RFC3339)) + slog.Debug( + "Setting up filter to accept advisories within time range", + "from", r[0].Format(time.RFC3339), + "to", r[1].Format(time.RFC3339), + ) } return r.Contains } @@ -393,6 +395,17 @@ func (c *config) setDefaults() { } } +// prepareLogging sets up the structured logging. +func (cfg *config) prepareLogging() error { + ho := slog.HandlerOptions{ + Level: slog.LevelDebug, + } + handler := slog.NewTextHandler(os.Stdout, &ho) + logger := slog.New(handler) + slog.SetDefault(logger) + return nil +} + // compileIgnorePatterns compiles the configured patterns to be ignored. func (p *provider) compileIgnorePatterns() error { pm, err := filter.NewPatternMatcher(p.IgnorePattern) diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 600c650..2165397 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -11,7 +11,6 @@ package main import ( "errors" "fmt" - "log" "os" "path/filepath" "strings" @@ -20,6 +19,7 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) type fullJob struct { @@ -29,11 +29,13 @@ type fullJob struct { err error } -// setupProviderFull fetches the provider-metadate.json for a specific provider. +// setupProviderFull fetches the provider-metadata.json for a specific provider. 
func (w *worker) setupProviderFull(provider *provider) error { - log.Printf("worker #%d: %s (%s)\n", - w.num, provider.Name, provider.Domain) - + w.log.Info("Setting up provider", + "provider", slog.GroupValue( + slog.String("name", provider.Name), + slog.String("domain", provider.Domain), + )) w.dir = "" w.provider = provider @@ -55,7 +57,7 @@ func (w *worker) setupProviderFull(provider *provider) error { "provider-metadata.json has %d validation issues", len(errors)) } - log.Printf("provider-metadata: %s\n", w.loc) + w.log.Info("Using provider-metadata", "url", w.loc) return nil } @@ -79,7 +81,7 @@ func (w *worker) fullWork(wg *sync.WaitGroup, jobs <-chan *fullJob) { func (p *processor) full() error { if p.cfg.runAsMirror() { - log.Println("Running in aggregator mode") + p.log.Info("Running in aggregator mode") // check if we need to setup a remote validator if p.cfg.RemoteValidatorOptions != nil { @@ -96,16 +98,18 @@ func (p *processor) full() error { }() } } else { - log.Println("Running in lister mode") + p.log.Info("Running in lister mode") } queue := make(chan *fullJob) var wg sync.WaitGroup - log.Printf("Starting %d workers.\n", p.cfg.Workers) + p.log.Info("Starting workers...", "num", p.cfg.Workers) + for i := 1; i <= p.cfg.Workers; i++ { wg.Add(1) w := newWorker(i, p) + go w.fullWork(&wg, queue) } @@ -135,12 +139,22 @@ func (p *processor) full() error { for i := range jobs { j := &jobs[i] if j.err != nil { - log.Printf("error: '%s' failed: %v\n", j.provider.Name, j.err) + p.log.Error("Job execution failed", + slog.Group("job", + slog.Group("provider"), + "name", j.provider.Name, + ), + "err", j.err, + ) continue } if j.aggregatorProvider == nil { - log.Printf( - "error: '%s' does not produce any result.\n", j.provider.Name) + p.log.Error("Job did not produce any result", + slog.Group("job", + slog.Group("provider"), + "name", j.provider.Name, + ), + ) continue } diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 69954bd..cc91b45 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -12,7 +12,6 @@ import ( "bufio" "encoding/csv" "fmt" - "log" "os" "path/filepath" "sort" @@ -377,7 +376,7 @@ func (w *worker) writeIndices() error { } for label, summaries := range w.summaries { - log.Printf("%s: %d\n", label, len(summaries)) + w.log.Debug("Writing indices", "label", label, "summaries.num", len(summaries)) if err := w.writeInterims(label, summaries); err != nil { return err } diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index bdd5ebc..cf4a937 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -17,7 +17,6 @@ import ( "errors" "fmt" "io" - "log" "net/http" "os" "path/filepath" @@ -102,12 +101,12 @@ func (w *worker) checkInterims( // XXX: Should we return an error here? for _, e := range errors { - log.Printf("validation error: %s: %v\n", url, e) + w.log.Error("validation error", "url", url, "err", e) } // We need to write the changed content. - // This will start the transcation if not already started. + // This will start the transaction if not already started. dst, err := tx.Dst() if err != nil { return nil, err @@ -159,8 +158,7 @@ func (w *worker) checkInterims( // setupProviderInterim prepares the worker for a specific provider. 
func (w *worker) setupProviderInterim(provider *provider) { - log.Printf("worker #%d: %s (%s)\n", - w.num, provider.Name, provider.Domain) + w.log.Info("Setting up worker", provider.Name, provider.Domain) w.dir = "" w.provider = provider @@ -262,7 +260,7 @@ func (p *processor) interim() error { queue := make(chan *interimJob) var wg sync.WaitGroup - log.Printf("Starting %d workers.\n", p.cfg.Workers) + p.log.Info("Starting workers...", "num", p.cfg.Workers) for i := 1; i <= p.cfg.Workers; i++ { wg.Add(1) w := newWorker(i, p) diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index a2b1e94..458002f 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -9,11 +9,11 @@ package main import ( - "log" "os" "path/filepath" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) type lazyTransaction struct { @@ -85,7 +85,8 @@ func (lt *lazyTransaction) commit() error { os.RemoveAll(lt.dst) return err } - log.Printf("Move %q -> %q\n", symlink, lt.src) + + slog.Debug("Moving directory", "from", symlink, "to", lt.src) if err := os.Rename(symlink, lt.src); err != nil { os.RemoveAll(lt.dst) return err diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 74a9670..b738a7e 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -15,7 +15,9 @@ import ( "path/filepath" "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gofrs/flock" + "golang.org/x/exp/slog" ) func lock(lockFile *string, fn func() error) error { @@ -44,8 +46,9 @@ func lock(lockFile *string, fn func() error) error { func main() { _, cfg, err := parseArgsConfig() - options.ErrorCheck(err) - options.ErrorCheck(cfg.prepare()) - p := processor{cfg: cfg} - options.ErrorCheck(lock(cfg.LockFile, p.process)) + cfg.prepareLogging() + options.ErrorCheckStructured(err) + options.ErrorCheckStructured(cfg.prepare()) + p := processor{cfg: cfg, log: slog.Default()} + options.ErrorCheckStructured(lock(cfg.LockFile, p.process)) } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 3acb48e..0779a5b 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -16,7 +16,7 @@ import ( "encoding/json" "fmt" "io" - "log" + "log/slog" "net/http" "net/url" "os" @@ -47,7 +47,7 @@ func (w *worker) mirror() (*csaf.AggregatorCSAFProvider, error) { if err != nil && w.dir != "" { // If something goes wrong remove the debris. if err := os.RemoveAll(w.dir); err != nil { - log.Printf("error: %v\n", err) + w.log.Error("Could not remove directory", "path", w.dir, "err", err) } } return result, err @@ -166,7 +166,7 @@ func (w *worker) writeProviderMetadata() error { {Expr: `$.public_openpgp_keys`, Action: util.ReMarshalMatcher(&pm.PGPKeys)}, }, w.metadataProvider); err != nil { // only log the errors - log.Printf("extracting data from orignal provider failed: %v\n", err) + w.log.Error("Extracting data from original provider failed", "err", err) } // We are mirroring the remote public keys, too. 
@@ -196,11 +196,11 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { for i := range pm.PGPKeys { pgpKey := &pm.PGPKeys[i] if pgpKey.URL == nil { - log.Printf("ignoring PGP key without URL: %s\n", pgpKey.Fingerprint) + w.log.Warn("Ignoring PGP key without URL", "fingerprint", pgpKey.Fingerprint) continue } if _, err := hex.DecodeString(string(pgpKey.Fingerprint)); err != nil { - log.Printf("ignoring PGP with invalid fingerprint: %s\n", *pgpKey.URL) + w.log.Warn("Ignoring PGP key with invalid fingerprint", "url", *pgpKey.URL) continue } @@ -344,7 +344,7 @@ func (w *worker) doMirrorTransaction() error { // Check if there is a sysmlink already. target := filepath.Join(w.processor.cfg.Folder, w.provider.Name) - log.Printf("target: '%s'\n", target) + w.log.Debug("Checking for path existance", "path", target) exists, err := util.PathExists(target) if err != nil { @@ -359,7 +359,7 @@ func (w *worker) doMirrorTransaction() error { } } - log.Printf("sym link: %s -> %s\n", w.dir, target) + w.log.Debug("Creating symbol", "from", w.dir, "to", target) // Create a new symlink if err := os.Symlink(w.dir, target); err != nil { @@ -368,7 +368,7 @@ func (w *worker) doMirrorTransaction() error { } // Move the symlink - log.Printf("Move: %s -> %s\n", target, webTarget) + w.log.Debug("Moving symbol", "from", target, "to", webTarget) if err := os.Rename(target, webTarget); err != nil { os.RemoveAll(w.dir) return err @@ -499,14 +499,14 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) u, err := url.Parse(file.URL()) if err != nil { - log.Printf("error: %s\n", err) + w.log.Error("Could not parse advisory file URL", "err", err) continue } // Should we ignore this advisory? if w.provider.ignoreURL(file.URL(), w.processor.cfg) { if w.processor.cfg.Verbose { - log.Printf("Ignoring %s: %q\n", w.provider.Name, file.URL()) + w.log.Info("Ignoring advisory", slog.Group("provider", "name", w.provider.Name), "file", file) } continue } @@ -514,7 +514,7 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) // Ignore not conforming filenames. filename := filepath.Base(u.Path) if !util.ConformingFileName(filename) { - log.Printf("Not conforming filename %q. Ignoring.\n", filename) + w.log.Warn("Ignoring advisory because of non-conforming filename", "filename", filename) continue } @@ -531,19 +531,18 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) } if err := downloadJSON(w.client, file.URL(), download); err != nil { - log.Printf("error: %v\n", err) + w.log.Error("Error while downloading JSON", "err", err) continue } // Check against CSAF schema. 
errors, err := csaf.ValidateCSAF(advisory) if err != nil { - log.Printf("error: %s: %v", file, err) + w.log.Error("Error while validating CSAF schema", "err", err) continue } if len(errors) > 0 { - log.Printf("CSAF file %s has %d validation errors.\n", - file, len(errors)) + w.log.Error("CSAF file has validation errors", "num.errors", len(errors), "file", file) continue } @@ -551,29 +550,27 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) if rmv := w.processor.remoteValidator; rmv != nil { rvr, err := rmv.Validate(advisory) if err != nil { - log.Printf("Calling remote validator failed: %s\n", err) + w.log.Error("Calling remote validator failed", "err", err) continue } if !rvr.Valid { - log.Printf( - "CSAF file %s does not validate remotely.\n", file) + w.log.Error("CSAF file does not validate remotely", "file", file.URL()) continue } } sum, err := csaf.NewAdvisorySummary(w.expr, advisory) if err != nil { - log.Printf("error: %s: %v\n", file, err) + w.log.Error("Error while creating new advisory", "file", file, "err", err) continue } if util.CleanFileName(sum.ID) != filename { - log.Printf("ID %q does not match filename %s", - sum.ID, filename) + w.log.Error("ID mismatch", "id", sum.ID, "filename", filename) } if err := w.extractCategories(label, advisory); err != nil { - log.Printf("error: %s: %v\n", file, err) + w.log.Error("Could not extract categories", "file", file, "err", err) continue } @@ -624,7 +621,7 @@ func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error { if err != nil { if err != errNotFound { - log.Printf("error: %s: %v\n", url, err) + w.log.Error("Could not find signature URL", "url", url, "err", err) } // Sign it our self. if sig, err = w.sign(data); err != nil { diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index ccd5062..9a71b90 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -10,14 +10,14 @@ package main import ( "fmt" - "log" "os" "path/filepath" - "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/csaf-poc/csaf_distribution/v3/util" + + "github.com/ProtonMail/gopenpgp/v2/crypto" + "golang.org/x/exp/slog" ) type processor struct { @@ -26,6 +26,9 @@ type processor struct { // remoteValidator is a globally configured remote validator. remoteValidator csaf.RemoteValidator + + // log is the structured logger for the whole processor. + log *slog.Logger } type summary struct { @@ -48,6 +51,7 @@ type worker struct { dir string // Directory to store data to. summaries map[string][]summary // the summaries of the advisories. categories map[string]util.Set[string] // the categories per label. + log *slog.Logger // the structured logger, supplied with the worker number. 
} func newWorker(num int, processor *processor) *worker { @@ -55,6 +59,7 @@ func newWorker(num int, processor *processor) *worker { num: num, processor: processor, expr: util.NewPathEval(), + log: processor.log.With(slog.Int("worker", num)), } } @@ -86,9 +91,10 @@ func (w *worker) locateProviderMetadata(domain string) error { if w.processor.cfg.Verbose { for i := range lpmd.Messages { - log.Printf( - "Loading provider-metadata.json of %q: %s\n", - domain, lpmd.Messages[i].Message) + w.log.Info( + "Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) } } @@ -141,7 +147,7 @@ func (p *processor) removeOrphans() error { fi, err := entry.Info() if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not retrieve file info", "err", err) continue } @@ -153,13 +159,13 @@ func (p *processor) removeOrphans() error { d := filepath.Join(path, entry.Name()) r, err := filepath.EvalSymlinks(d) if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not evaluate symlink", "err", err) continue } fd, err := os.Stat(r) if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not retrieve file stats", "err", err) continue } @@ -169,18 +175,18 @@ func (p *processor) removeOrphans() error { } // Remove the link. - log.Printf("removing link %s -> %s\n", d, r) + p.log.Info("Removing link", "path", fmt.Sprintf("%s -> %s", d, r)) if err := os.Remove(d); err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not remove symlink", "err", err) continue } // Only remove directories which are in our folder. if rel, err := filepath.Rel(prefix, r); err == nil && rel == filepath.Base(r) { - log.Printf("removing directory %s\n", r) + p.log.Info("Remove directory", "path", r) if err := os.RemoveAll(r); err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not remove directory", "err", err) } } } diff --git a/csaf/advisories.go b/csaf/advisories.go index 5b85690..abd55c6 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -13,6 +13,7 @@ import ( "fmt" "io" "log" + "log/slog" "net/http" "net/url" "strings" @@ -23,6 +24,7 @@ import ( // AdvisoryFile constructs the urls of a remote file. type AdvisoryFile interface { + slog.LogValuer URL() string SHA256URL() string SHA512URL() string @@ -46,6 +48,11 @@ func (paf PlainAdvisoryFile) SHA512URL() string { return string(paf) + ".sha512" // SignURL returns the URL of signature file of this advisory. func (paf PlainAdvisoryFile) SignURL() string { return string(paf) + ".asc" } +// LogValue implements [slog.LogValuer] +func (paf PlainAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", paf.URL())) +} + // HashedAdvisoryFile is a more involed version of checkFile. // Here each component can be given explicitly. // If a component is not given it is constructed by @@ -71,6 +78,11 @@ func (haf HashedAdvisoryFile) SHA512URL() string { return haf.name(2, ".sha512") // SignURL returns the URL of signature file of this advisory. func (haf HashedAdvisoryFile) SignURL() string { return haf.name(3, ".asc") } +// LogValue implements [slog.LogValuer] +func (haf HashedAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", haf.URL())) +} + // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. 
type AdvisoryFileProcessor struct { diff --git a/go.mod b/go.mod index 469c8a3..1f6f51d 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 go.etcd.io/bbolt v1.3.8 golang.org/x/crypto v0.14.0 - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa + golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f golang.org/x/term v0.13.0 golang.org/x/time v0.3.0 ) diff --git a/go.sum b/go.sum index 3a101d4..cbbb382 100644 --- a/go.sum +++ b/go.sum @@ -53,6 +53,8 @@ golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= diff --git a/internal/options/options.go b/internal/options/options.go index 961b4b4..ffd699b 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -19,6 +19,7 @@ import ( "github.com/mitchellh/go-homedir" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) // Parser helps parsing command line arguments and loading @@ -147,3 +148,12 @@ func ErrorCheck(err error) { log.Fatalf("error: %v\n", err) } } + +// ErrorCheck checks if err is not nil and terminates +// the program if so. +func ErrorCheckStructured(err error) { + if err != nil { + slog.Error("Error while executing program", "err", err) + os.Exit(1) + } +} From fb1cf32e17f2dd007efc979c8cbb3fc80786f2e6 Mon Sep 17 00:00:00 2001 From: Christian Banse Date: Fri, 19 Apr 2024 09:35:36 +0200 Subject: [PATCH 005/176] Fixed linting errors --- cmd/csaf_aggregator/config.go | 2 +- internal/options/options.go | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 2a2bef2..f1e602d 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -396,7 +396,7 @@ func (c *config) setDefaults() { } // prepareLogging sets up the structured logging. -func (cfg *config) prepareLogging() error { +func (c *config) prepareLogging() error { ho := slog.HandlerOptions{ Level: slog.LevelDebug, } diff --git a/internal/options/options.go b/internal/options/options.go index ffd699b..d8574ff 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -149,8 +149,9 @@ func ErrorCheck(err error) { } } -// ErrorCheck checks if err is not nil and terminates -// the program if so. +// ErrorCheckStructured checks if err is not nil and terminates the program if +// so. This is similar to [ErrorCheck], but uses [slog] instead of the +// non-structured Go logging. 
func ErrorCheckStructured(err error) { if err != nil { slog.Error("Error while executing program", "err", err) From 39a29e39f1272bee8794413b1372cf3a592fc3c6 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Mon, 22 Apr 2024 13:11:30 +0200 Subject: [PATCH 006/176] Change Licenses from MIT to Apache 2.0 --- LICENSES/Apache-2.0.txt | 73 +++++++++++++++++++++ LICENSES/MIT.txt | 21 ------ Makefile | 6 +- README.md | 12 +++- cmd/csaf_aggregator/client.go | 6 +- cmd/csaf_aggregator/config.go | 6 +- cmd/csaf_aggregator/files.go | 6 +- cmd/csaf_aggregator/full.go | 6 +- cmd/csaf_aggregator/indices.go | 6 +- cmd/csaf_aggregator/interim.go | 6 +- cmd/csaf_aggregator/lazytransaction.go | 6 +- cmd/csaf_aggregator/lister.go | 6 +- cmd/csaf_aggregator/main.go | 6 +- cmd/csaf_aggregator/mirror.go | 6 +- cmd/csaf_aggregator/processor.go | 6 +- cmd/csaf_checker/config.go | 6 +- cmd/csaf_checker/links.go | 6 +- cmd/csaf_checker/links_test.go | 6 +- cmd/csaf_checker/main.go | 6 +- cmd/csaf_checker/processor.go | 6 +- cmd/csaf_checker/report.go | 6 +- cmd/csaf_checker/reporters.go | 6 +- cmd/csaf_checker/roliecheck.go | 6 +- cmd/csaf_checker/rules.go | 6 +- cmd/csaf_downloader/config.go | 6 +- cmd/csaf_downloader/downloader.go | 6 +- cmd/csaf_downloader/forwarder.go | 6 +- cmd/csaf_downloader/forwarder_test.go | 6 +- cmd/csaf_downloader/main.go | 6 +- cmd/csaf_downloader/stats.go | 6 +- cmd/csaf_downloader/stats_test.go | 6 +- cmd/csaf_provider/actions.go | 6 +- cmd/csaf_provider/config.go | 6 +- cmd/csaf_provider/controller.go | 6 +- cmd/csaf_provider/create.go | 6 +- cmd/csaf_provider/files.go | 6 +- cmd/csaf_provider/indices.go | 6 +- cmd/csaf_provider/main.go | 6 +- cmd/csaf_provider/mux.go | 6 +- cmd/csaf_provider/rolie.go | 6 +- cmd/csaf_provider/tmpl/create.html | 6 +- cmd/csaf_provider/tmpl/index.html | 6 +- cmd/csaf_provider/tmpl/upload.html | 6 +- cmd/csaf_provider/transaction.go | 6 +- cmd/csaf_uploader/config.go | 6 +- cmd/csaf_uploader/main.go | 6 +- cmd/csaf_uploader/processor.go | 6 +- cmd/csaf_validator/main.go | 6 +- csaf/advisories.go | 6 +- csaf/advisory.go | 6 +- csaf/cvss20enums.go | 6 +- csaf/cvss3enums.go | 6 +- csaf/doc.go | 6 +- csaf/generate_cvss_enums.go | 12 ++-- csaf/models.go | 6 +- csaf/providermetaloader.go | 6 +- csaf/remotevalidation.go | 6 +- csaf/rolie.go | 6 +- csaf/summary.go | 6 +- csaf/util.go | 6 +- csaf/util_test.go | 6 +- csaf/validation.go | 6 +- docs/scripts/DNSConfigForItest.sh | 6 +- docs/scripts/TLSClientConfigsForITest.sh | 6 +- docs/scripts/TLSConfigsForITest.sh | 6 +- docs/scripts/createCCForITest.sh | 6 +- docs/scripts/createRootCAForITest.sh | 6 +- docs/scripts/createWebserverCertForITest.sh | 6 +- docs/scripts/downloadExamples.sh | 6 +- docs/scripts/setupProviderForITest.sh | 6 +- docs/scripts/setupValidationService.sh | 6 +- docs/scripts/testAggregator.sh | 6 +- docs/scripts/testChecker.sh | 6 +- docs/scripts/testDownloader.sh | 6 +- docs/scripts/uploadToProvider.sh | 6 +- internal/certs/certs.go | 6 +- internal/certs/certs_test.go | 6 +- internal/filter/filter.go | 6 +- internal/filter/filter_test.go | 6 +- internal/misc/doc.go | 6 +- internal/misc/mime.go | 6 +- internal/misc/mime_test.go | 6 +- internal/models/models.go | 6 +- internal/models/models_test.go | 6 +- internal/options/log.go | 6 +- internal/options/log_test.go | 6 +- internal/options/options.go | 6 +- internal/options/options_test.go | 6 +- util/client.go | 6 +- util/csv.go | 6 +- util/doc.go | 6 +- util/file.go | 6 +- util/file_test.go | 6 +- util/hash.go | 6 +- util/json.go | 6 +- 
util/set.go | 6 +- util/url.go | 6 +- util/version.go | 6 +- 98 files changed, 372 insertions(+), 310 deletions(-) create mode 100644 LICENSES/Apache-2.0.txt delete mode 100644 LICENSES/MIT.txt diff --git a/LICENSES/Apache-2.0.txt b/LICENSES/Apache-2.0.txt new file mode 100644 index 0000000..137069b --- /dev/null +++ b/LICENSES/Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSES/MIT.txt b/LICENSES/MIT.txt deleted file mode 100644 index 57165e6..0000000 --- a/LICENSES/MIT.txt +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/Makefile b/Makefile index 19e31c7..b4b3964 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ -# This file is Free Software under the MIT License -# without warranty, see README.md and LICENSES/MIT.txt for details. +# This file is Free Software under the Apache-2.0 License +# without warranty, see README.md and LICENSES/Apache-2.0.txt for details. # -# SPDX-License-Identifier: MIT +# SPDX-License-Identifier: Apache-2.0 # # SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) # Software-Engineering: 2021 Intevation GmbH diff --git a/README.md b/README.md index 69601cd..78342f5 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,13 @@ + + # csaf_distribution An implementation of a [CSAF](https://csaf.io/) @@ -90,7 +100,7 @@ For further details of the development process consult our [development page](./ ## License -- `csaf_distribution` is licensed as Free Software under MIT License. +- `csaf_distribution` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). - See the specific source files for details, the license itself can be found in the directory `LICENSES/`. diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index deb108a..8200d34 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index edb1fd9..711238c 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/files.go b/cmd/csaf_aggregator/files.go index adf04aa..18ccbb6 100644 --- a/cmd/csaf_aggregator/files.go +++ b/cmd/csaf_aggregator/files.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 600c650..fb8e0f9 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 69954bd..598685c 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index bdd5ebc..692841f 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index a2b1e94..16470d3 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/lister.go b/cmd/csaf_aggregator/lister.go index a3bfd29..4d758e4 100644 --- a/cmd/csaf_aggregator/lister.go +++ b/cmd/csaf_aggregator/lister.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 74a9670..d5d04e5 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 3acb48e..32e0cbf 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index ccd5062..fb9acde 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index 3502443..ac9ce62 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index 5784489..0456ace 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index 3229511..8abf4e6 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/main.go b/cmd/csaf_checker/main.go index 73a5cce..752fdf8 100644 --- a/cmd/csaf_checker/main.go +++ b/cmd/csaf_checker/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 8f3a6c1..451a315 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_checker/report.go b/cmd/csaf_checker/report.go index 2b53bb2..9b5251b 100644 --- a/cmd/csaf_checker/report.go +++ b/cmd/csaf_checker/report.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index c707a14..016d371 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 94b1c2f..53d1150 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_checker/rules.go b/cmd/csaf_checker/rules.go index 6981b6b..eadbbb2 100644 --- a/cmd/csaf_checker/rules.go +++ b/cmd/csaf_checker/rules.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 367780f..39a4d05 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 7fa0c7c..38203bf 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022, 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2022, 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index eda6595..13957d5 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index dc515ad..edfa476 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index daff163..9364b88 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_downloader/stats.go b/cmd/csaf_downloader/stats.go index 237420a..94a38de 100644 --- a/cmd/csaf_downloader/stats.go +++ b/cmd/csaf_downloader/stats.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/stats_test.go b/cmd/csaf_downloader/stats_test.go index b3ab914..79406c7 100644 --- a/cmd/csaf_downloader/stats_test.go +++ b/cmd/csaf_downloader/stats_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_provider/actions.go b/cmd/csaf_provider/actions.go index 54d4e24..8f385e6 100644 --- a/cmd/csaf_provider/actions.go +++ b/cmd/csaf_provider/actions.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index af99cc1..49a7204 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/controller.go b/cmd/csaf_provider/controller.go index c8680ff..7f64fe2 100644 --- a/cmd/csaf_provider/controller.go +++ b/cmd/csaf_provider/controller.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/create.go b/cmd/csaf_provider/create.go index 8e882a5..56893c6 100644 --- a/cmd/csaf_provider/create.go +++ b/cmd/csaf_provider/create.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/files.go b/cmd/csaf_provider/files.go index 0b3c5ed..39a97e3 100644 --- a/cmd/csaf_provider/files.go +++ b/cmd/csaf_provider/files.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/indices.go b/cmd/csaf_provider/indices.go index a7ecd3b..805371b 100644 --- a/cmd/csaf_provider/indices.go +++ b/cmd/csaf_provider/indices.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 2264676..8740e81 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/mux.go b/cmd/csaf_provider/mux.go index 34b7e2e..021c074 100644 --- a/cmd/csaf_provider/mux.go +++ b/cmd/csaf_provider/mux.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/rolie.go b/cmd/csaf_provider/rolie.go index ea48480..98448bd 100644 --- a/cmd/csaf_provider/rolie.go +++ b/cmd/csaf_provider/rolie.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/tmpl/create.html b/cmd/csaf_provider/tmpl/create.html index 74fef6d..0b06f6f 100644 --- a/cmd/csaf_provider/tmpl/create.html +++ b/cmd/csaf_provider/tmpl/create.html @@ -1,8 +1,8 @@ diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index b02165b..81a45fa 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -100,22 +100,12 @@ The following example file documents all available configuration options: #tlps = ["csaf", "white", "amber", "green", "red"] # Make the provider create a ROLIE service document. -#create_service_document = true +#create_service_document = false # Make the provider create a ROLIE category document from a list of strings. # If a list item starts with `expr:` # the rest of the string is used as a JsonPath expression # to extract a string from the incoming advisories. -# If the result of the expression is a string this string -# is used. If the result is an array each element of -# this array is tested if it is a string or an array. -# If this test fails the expression fails. If the -# test succeeds the rules are applied recursively to -# collect all strings in the result. -# Suggested expressions are: -# - vendor, product family and product names: "expr:$.product_tree..branches[?(@.category==\"vendor\" || @.category==\"product_family\" || @.category==\"product_name\")].name" -# - CVEs: "expr:$.vulnerabilities[*].cve" -# - CWEs: "expr:$.vulnerabilities[*].cwe.id" # Strings not starting with `expr:` are taken verbatim. # By default no category documents are created. # This example provides an overview over the syntax, From 2f9d5658eb8c34dd782d95b9cd030e348163d30d Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 13 May 2024 11:50:06 +0200 Subject: [PATCH 018/176] docs: remove unused license file (#544) * Remove LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt as the only code using it was already removed with 6b9ecead89c5b40e86928c6e7f416903e0a495e1. --- LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt | 51 ------------------- 1 file changed, 51 deletions(-) delete mode 100644 LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt diff --git a/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt b/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt deleted file mode 100644 index fa1aad8..0000000 --- a/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt +++ /dev/null @@ -1,51 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. From 7a5347803abc06dffbd106b8544e696d81ac3056 Mon Sep 17 00:00:00 2001 From: Florian von Samson <167841080+fvsamson@users.noreply.github.com> Date: Mon, 13 May 2024 14:36:03 +0200 Subject: [PATCH 019/176] docs: improve README.md's first sentence * Improve the structure of the sentence and the two links. --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 4c02b8f..bc9ae2a 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,9 @@ # csaf_distribution -An implementation of a [CSAF](https://csaf.io/) -[2.0 Spec](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) -([Errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) +Implements a [CSAF](https://csaf.io/) +([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) +and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. 
From 33bd6bd78786564f56f458618df611e700eeeea3 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 12 Jun 2024 10:08:06 +0200 Subject: [PATCH 020/176] Extend unittest coverage in util --- util/csv_test.go | 40 +++++++++ util/file_test.go | 141 ++++++++++++++++++++++++++++++- util/hash_test.go | 109 ++++++++++++++++++++++++ util/json_test.go | 209 ++++++++++++++++++++++++++++++++++++++++++++++ util/set_test.go | 65 ++++++++++++++ util/url_test.go | 36 ++++++++ 6 files changed, 599 insertions(+), 1 deletion(-) create mode 100644 util/csv_test.go create mode 100644 util/hash_test.go create mode 100644 util/json_test.go create mode 100644 util/set_test.go create mode 100644 util/url_test.go diff --git a/util/csv_test.go b/util/csv_test.go new file mode 100644 index 0000000..a744b75 --- /dev/null +++ b/util/csv_test.go @@ -0,0 +1,40 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "bytes" + "testing" +) + +func TestCSV(t *testing.T) { + buf := new(bytes.Buffer) + csvWriter := NewFullyQuotedCSWWriter(buf) + for _, x := range [][]string{{"a", "b", "c"}, {"d", "e", "f"}} { + err := csvWriter.Write(x) + if err != nil { + t.Error(err) + } + } + + csvWriter.Flush() + err := csvWriter.Error() + if err != nil { + t.Error(err) + } + for _, want := range []string{`"a","b","c"`, `"d","e","f"`} { + got, err := buf.ReadString('\n') + if err != nil { + t.Error(err) + } + if got[:len(got)-1] != want { + t.Errorf("FullyQuotedCSWWriter: Expected %q but got %q.", want, got) + } + } +} diff --git a/util/file_test.go b/util/file_test.go index 3f648b8..320f3d4 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -10,6 +10,8 @@ package util import ( "bytes" + "os" + "path/filepath" "testing" ) @@ -55,8 +57,54 @@ func TestConformingFileName(t *testing.T) { } } -func TestNWriter(t *testing.T) { +func TestIDMatchesFilename(t *testing.T) { + pathEval := NewPathEval() + doc := make(map[string]interface{}) + doc["document"] = map[string]interface{}{ + "tracking": map[string]interface{}{ + "id": "valid.json", + }, + } + + err := IDMatchesFilename(pathEval, doc, "valid.json") + if err != nil { + t.Errorf("IDMatchesFilename: Expected nil, got %q", err) + } + + err = IDMatchesFilename(pathEval, doc, "different_file_name.json") + if err == nil { + t.Error("IDMatchesFilename: Expected error, got nil") + } + + doc["document"] = map[string]interface{}{ + "tracking": map[string]interface{}{}, + } + err = IDMatchesFilename(pathEval, doc, "valid.json") + if err == nil { + t.Error("IDMatchesFilename: Expected error, got nil") + } +} + +func TestPathExists(t *testing.T) { + got, err := PathExists("/this/path/does/not/exist") + if err != nil { + t.Error(err) + } + if got != false { + t.Error("PathExists: Expected false, got true") + } + dir := t.TempDir() + got, err = PathExists(dir) + if err != nil { + t.Error(err) + } + if got != true { + t.Error("PathExists: Expected true, got false") + } +} + +func TestNWriter(t *testing.T) { msg := []byte("Gruß!\n") first, second := msg[:len(msg)/2], msg[len(msg)/2:] @@ -78,3 +126,94 @@ func TestNWriter(t *testing.T) { t.Errorf("Expected %q, but got %q", msg, out) } } + +func TestWriteToFile(t *testing.T) { + filename := filepath.Join(t.TempDir(), "test_file") + wt := 
bytes.NewBufferString("test_data") + err := WriteToFile(filename, wt) + if err != nil { + t.Error(err) + } + fileData, err := os.ReadFile(filename) + if err != nil { + t.Error(err) + } + if !bytes.Equal(fileData, []byte("test_data")) { + t.Errorf("DeepCopy: Expected test_data, got %v", fileData) + } +} + +func TestMakeUniqFile(t *testing.T) { + dir := t.TempDir() + _, file, err := MakeUniqFile(dir) + if err != nil { + t.Error(err) + } + _, err = file.Write([]byte("test_data")) + if err != nil { + t.Error(err) + } + err = file.Close() + if err != nil { + t.Error(err) + } +} + +func Test_mkUniq(t *testing.T) { + dir := t.TempDir() + name, err := mkUniq(dir+"/", func(name string) error { + return nil + }) + if err != nil { + t.Error(err) + } + firstTime := true + name1, err := mkUniq(dir+"/", func(_ string) error { + if firstTime { + firstTime = false + return os.ErrExist + } + return nil + }) + if err != nil { + t.Error(err) + } + if name == name1 { + t.Errorf("mkUniq: Expected unique names, got %v and %v", name, name1) + } +} + +func TestDeepCopy(t *testing.T) { + dir := t.TempDir() + os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755) + os.MkdirAll(filepath.Join(dir, "dst"), 0755) + os.MkdirAll(filepath.Join(dir, "dst1"), 0755) + err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755) + if err != nil { + t.Error(err) + } + + err = DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")) + if err != nil { + t.Error(err) + } + + fileData, err := os.ReadFile(filepath.Join(dir, "dst/folder0/test_file")) + if err != nil { + t.Error(err) + } + + if !bytes.Equal(fileData, []byte("test_data")) { + t.Errorf("DeepCopy: Expected test_data, got %v", fileData) + } + + err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")) + if err == nil { + t.Error("DeepCopy: Expected error, got nil") + } + + err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist") + if err == nil { + t.Error("DeepCopy: Expected error, got nil") + } +} diff --git a/util/hash_test.go b/util/hash_test.go new file mode 100644 index 0000000..ed0f0b2 --- /dev/null +++ b/util/hash_test.go @@ -0,0 +1,109 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "hash" + "os" + "path/filepath" + "reflect" + "strings" + "testing" +) + +func TestHashFromReader(t *testing.T) { + r := strings.NewReader("deadbeef") + want := []byte{0xde, 0xad, 0xbe, 0xef} + if got, err := HashFromReader(r); !reflect.DeepEqual(want, got) { + if err != nil { + t.Error(err) + } + t.Errorf("HashFromReader: Expected %v, got %v", want, got) + } +} + +func TestHashFromFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + testFile, err := os.Create(filePath) + if err != nil { + t.Error(err) + } + + testFile.WriteString("deadbeef") + want := []byte{0xde, 0xad, 0xbe, 0xef} + + testFile.Close() + + if got, err := HashFromFile(filePath); !reflect.DeepEqual(want, got) { + if err != nil { + t.Error(err) + } + t.Errorf("HashFromFile: Expected %v, got %v", want, got) + } +} + +type deadbeefHash struct { + hash.Hash +} + +func (deadbeefHash) Write(p []byte) (int, error) { return len(p), nil } +func (deadbeefHash) Sum(_ []byte) []byte { return []byte{0xde, 0xad, 0xbe, 0xef} } + +func TestWriteHashToFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + + hashArg := deadbeefHash{} + nameArg := "name" + want := "deadbeef " + nameArg + "\n" + + err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}) + if err != nil { + t.Error(err) + } + testFile, err := os.Open(filePath) + if err != nil { + t.Error(err) + } + defer testFile.Close() + fileContent, err := os.ReadFile(filePath) + if err != nil { + t.Error(err) + } + if got := string(fileContent); got != want { + t.Errorf("WriteHashToFile: Expected %v, got %v", want, got) + } +} + +func TestWriteHashSumToFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + + sum := []byte{0xde, 0xad, 0xbe, 0xef} + nameArg := "name" + want := "deadbeef " + nameArg + "\n" + + err := WriteHashSumToFile(filePath, nameArg, sum) + if err != nil { + t.Error(err) + } + testFile, err := os.Open(filePath) + if err != nil { + t.Error(err) + } + defer testFile.Close() + fileContent, err := os.ReadFile(filePath) + if err != nil { + t.Error(err) + } + if got := string(fileContent); got != want { + t.Errorf("WriteHashSumToFile: Expected %v, got %v", want, got) + } +} diff --git a/util/json_test.go b/util/json_test.go new file mode 100644 index 0000000..452fabe --- /dev/null +++ b/util/json_test.go @@ -0,0 +1,209 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "context" + "reflect" + "testing" + "time" +) + +func TestPathEval_Compile(t *testing.T) { + pathEval := NewPathEval() + eval, err := pathEval.Compile("foo") + if err != nil { + t.Error(err) + } + + // Check caching + eval1, err := pathEval.Compile("foo") + if err != nil { + t.Error(err) + } + if reflect.ValueOf(eval).Pointer() != reflect.ValueOf(eval1).Pointer() { + t.Error("PathEval_Compile: Expected cached eval") + } + + got, err := eval.EvalInt(context.Background(), map[string]interface{}{"foo": 5}) + if err != nil { + t.Error(err) + } + if got != 5 { + t.Errorf("PathEval_Compile: Expected 5, got %v", got) + } +} + +func TestPathEval_Eval(t *testing.T) { + pathEval := NewPathEval() + _, err := pathEval.Eval("foo", nil) + if err == nil { + t.Error("PathEval_Eval: Expected error, got nil") + } + got, err := pathEval.Eval("foo", map[string]interface{}{"foo": 5}) + if err != nil { + t.Error(err) + } + if got != 5 { + t.Errorf("PathEval_Compile: Expected 5, got %v", got) + } +} + +func TestReMarshalMatcher(t *testing.T) { + var intDst int + var uintSrc uint = 2 + remarshalFunc := ReMarshalMatcher(&intDst) + err := remarshalFunc(uintSrc) + if err != nil { + t.Error(err) + } + if intDst != 2 { + t.Errorf("ReMarshalMatcher: Expected %v, got %v", uintSrc, intDst) + } +} + +func TestBoolMatcher(t *testing.T) { + var boolDst bool + boolFunc := BoolMatcher(&boolDst) + err := boolFunc(true) + if err != nil { + t.Error(err) + } + + if boolDst != true { + t.Error("BoolMatcher: Expected true got false") + } + + err = boolFunc(1) + if err == nil { + t.Error("BoolMatcher: Expected error, got nil") + } +} + +func TestStringMatcher(t *testing.T) { + var stringDst string + stringFunc := StringMatcher(&stringDst) + err := stringFunc("test") + if err != nil { + t.Error(err) + } + + if stringDst != "test" { + t.Errorf("StringMatcher: Expected test, got %v", stringDst) + } + + err = stringFunc(1) + if err == nil { + t.Error("StringMatcher: Expected error, got nil") + } +} + +func TestStringTreeMatcher(t *testing.T) { + var stringTreeDst []string + stringTreeFunc := StringTreeMatcher(&stringTreeDst) + err := stringTreeFunc([]any{"a", "a", "b"}) + if err != nil { + t.Error(err) + } + + wantAnySlice := []any{"a", "b"} + if reflect.DeepEqual(stringTreeDst, wantAnySlice) { + t.Errorf("StringTreeMatcher: Expected %v, got %v", wantAnySlice, stringTreeDst) + } + + err = stringTreeFunc([]string{"a", "a", "b"}) + if err == nil { + t.Error("StringTreeMatcher: Expected error, got nil") + } + + err = stringTreeFunc(1) + if err == nil { + t.Error("StringTreeMatcher: Expected error, got nil") + } +} + +func TestTimeMatcher(t *testing.T) { + var timeDst time.Time + timeFunc := TimeMatcher(&timeDst, time.RFC3339) + err := timeFunc("2024-03-18T12:57:48.236Z") + if err != nil { + t.Error(err) + } + wantTime := time.Date(2024, time.March, 18, 12, 57, 48, 236_000_000, time.UTC) + if timeDst != wantTime { + t.Errorf("TimeMatcher: Expected %v, got %v", wantTime, timeDst) + } + + err = timeFunc("") + if err == nil { + t.Error("TimeMatcher: Expected error, got nil") + } + + err = timeFunc(1) + if err == nil { + t.Error("TimeMatcher: Expected error, got nil") + } +} + +func TestPathEval_Extract(t *testing.T) { + pathEval := NewPathEval() + var result string + matcher := StringMatcher(&result) + err := pathEval.Extract("foo", 
matcher, true, map[string]interface{}{"foo": "bar"}) + if err != nil { + t.Error(err) + } + if result != "bar" { + t.Errorf("PathEval_Extract: Expected bar, got %v", result) + } +} + +func TestPathEval_Match(t *testing.T) { + var got string + doc := map[string]interface{}{"foo": "bar"} + + pe := NewPathEval() + pem := PathEvalMatcher{Expr: "foo", Action: StringMatcher(&got)} + + err := pe.Match([]PathEvalMatcher{pem}, doc) + if err != nil { + t.Error(err) + } + if got != "bar" { + t.Errorf("PathEval_Match: Expected bar, got %v", got) + } +} + +func TestPathEval_Strings(t *testing.T) { + pe := NewPathEval() + doc := map[string]interface{}{"foo": "bar"} + want := []string{"bar"} + + got, err := pe.Strings([]string{"foo"}, true, doc) + if err != nil { + t.Error(err) + } + + if !reflect.DeepEqual(got, want) { + t.Errorf("PathEval_Strings: Expected %v, got %v", want, got) + } +} + +func TestAsStrings(t *testing.T) { + arg := []interface{}{"foo", "bar"} + want := []string{"foo", "bar"} + + got, valid := AsStrings(arg) + if !valid { + t.Error("AsStrings: Expected true, got false") + } + if !reflect.DeepEqual(got, want) { + t.Errorf("AsStrings: Expected %v, got %v", want, got) + } +} diff --git a/util/set_test.go b/util/set_test.go new file mode 100644 index 0000000..a28878e --- /dev/null +++ b/util/set_test.go @@ -0,0 +1,65 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "reflect" + "sort" + "testing" +) + +func TestSet(t *testing.T) { + s := Set[int]{} + if s.Contains(0) { + t.Error("Set.Contains: Expected false got true") + } + s.Add(0) + if !s.Contains(0) { + t.Error("Set.Contains: Expected true got false") + } + + s0 := Set[int]{} + s1 := Set[int]{} + + s0.Add(0) + s0.Add(1) + + s1.Add(0) + s1.Add(1) + s1.Add(2) + + diff0 := s0.Difference(s1) + diff1 := s1.Difference(s0) + + if reflect.DeepEqual(diff0, diff1) { + t.Errorf("Set.Difference: %q and %q are different", diff0, diff1) + } + + if s0.ContainsAll(s1) { + t.Error("Set.ContainsAll: Expected false got true") + } + + if !s1.ContainsAll(s0) { + t.Error("Set.ContainsAll: Expected true got false") + } + + s2 := Set[int]{} + s2.Add(0) + s2.Add(1) + s2.Add(2) + s2.Add(3) + + wantKeys := []int{0, 1, 2, 3} + gotKeys := s2.Keys() + sort.Ints(gotKeys) + + if !reflect.DeepEqual(wantKeys, gotKeys) { + t.Errorf("Set.Keys: Expected %q got %q", wantKeys, gotKeys) + } +} diff --git a/util/url_test.go b/util/url_test.go new file mode 100644 index 0000000..dec73dc --- /dev/null +++ b/util/url_test.go @@ -0,0 +1,36 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "net/url" + "testing" +) + +func TestBaseUrl(t *testing.T) { + for _, x := range [][2]string{ + {`http://example.com`, `http://example.com/`}, + {`scheme://example.com`, `scheme://example.com/`}, + {`https://example.com`, `https://example.com/`}, + {`https://example.com:8080/`, `https://example.com:8080/`}, + {`https://user@example.com:8080/`, `https://user@example.com:8080/`}, + {`https://user@example.com:8080/resource`, `https://user@example.com:8080/`}, + {`https://user@example.com:8080/resource/`, `https://user@example.com:8080/resource/`}, + {`https://user@example.com:8080/resource/#fragment`, `https://user@example.com:8080/resource/`}, + {`https://user@example.com:8080/resource/?query=test#fragment`, `https://user@example.com:8080/resource/`}, + } { + url, _ := url.Parse(x[0]) + if got, err := BaseURL(url); got != x[1] { + if err != nil { + t.Error(err) + } + t.Errorf("%q: Expected %q but got %q.", x[0], x[1], got) + } + } +} From e2ad3d3f8302a81be9fe4d20153aac2f0dc041bd Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 21 Jun 2024 14:02:51 +0200 Subject: [PATCH 021/176] docs: fix licensing info for generated files (#542) * docs: fix licensing info for generated files * change generate_cvss_enums.go to note that the input file is relevant for the license. * change license and copyright of cvss20enums.go and cvss3enums.go to BSD-3-Clause and FIRST. * add reuse.software 3.0 compatible files for the schema cvss files. * Stamp right license into generated files. --------- Co-authored-by: Sascha L. Teichmann --- LICENSES/BSD-3-Clause.txt | 11 +++++++++++ csaf/cvss20enums.go | 9 ++------- csaf/cvss3enums.go | 9 ++------- csaf/generate_cvss_enums.go | 28 +++++++++++++++++++++------- csaf/schema/cvss-v2.0.json.license | 2 ++ csaf/schema/cvss-v3.0.json.license | 2 ++ csaf/schema/cvss-v3.1.json.license | 2 ++ 7 files changed, 42 insertions(+), 21 deletions(-) create mode 100644 LICENSES/BSD-3-Clause.txt create mode 100644 csaf/schema/cvss-v2.0.json.license create mode 100644 csaf/schema/cvss-v3.0.json.license create mode 100644 csaf/schema/cvss-v3.1.json.license diff --git a/LICENSES/BSD-3-Clause.txt b/LICENSES/BSD-3-Clause.txt new file mode 100644 index 0000000..ea890af --- /dev/null +++ b/LICENSES/BSD-3-Clause.txt @@ -0,0 +1,11 @@ +Copyright (c) . + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/csaf/cvss20enums.go b/csaf/cvss20enums.go index 7056f3e..97d2e10 100644 --- a/csaf/cvss20enums.go +++ b/csaf/cvss20enums.go @@ -1,10 +1,5 @@ -// This file is Free Software under the Apache-2.0 License -// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. -// -// SPDX-License-Identifier: Apache-2.0 -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-License-Identifier: BSD-3-Clause +// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! diff --git a/csaf/cvss3enums.go b/csaf/cvss3enums.go index b8cf54f..32e01e3 100644 --- a/csaf/cvss3enums.go +++ b/csaf/cvss3enums.go @@ -1,10 +1,5 @@ -// This file is Free Software under the Apache-2.0 License -// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. -// -// SPDX-License-Identifier: Apache-2.0 -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-License-Identifier: BSD-3-Clause +// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index 7c9b9fd..c84ab15 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -14,21 +14,21 @@ import ( "bytes" "encoding/json" "flag" + "fmt" "go/format" "log" "os" + "regexp" "sort" "strings" "text/template" ) -const tmplText = `// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. -// -// SPDX-License-Identifier: MIT -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// We from Intevation consider the source code parts in the following +// template file as too insignificant to be a piece of work that gains +// "copyrights" protection in the European Union. So the license(s) +// of the output files are fully determined by the input file. +const tmplText = `// {{ $.License }} // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! 
@@ -69,6 +69,7 @@ type definition struct { } type schema struct { + License []string `json:"license"` Definitions map[string]*definition `json:"definitions"` } @@ -137,9 +138,22 @@ func main() { } sort.Strings(defs) + license := "determine license(s) from input file and replace this line" + + pattern := regexp.MustCompile(`Copyright \(c\) (\d+), FIRST.ORG, INC.`) + for _, line := range s.License { + if m := pattern.FindStringSubmatch(line); m != nil { + license = fmt.Sprintf( + "SPDX-License-Identifier: BSD-3-Clause\n"+ + "// SPDX-FileCopyrightText: %s FIRST.ORG, INC.", m[1]) + break + } + } + var source bytes.Buffer check(tmpl.Execute(&source, map[string]any{ + "License": license, "Prefix": *prefix, "Definitions": s.Definitions, "Keys": defs, diff --git a/csaf/schema/cvss-v2.0.json.license b/csaf/schema/cvss-v2.0.json.license new file mode 100644 index 0000000..dd033e8 --- /dev/null +++ b/csaf/schema/cvss-v2.0.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. diff --git a/csaf/schema/cvss-v3.0.json.license b/csaf/schema/cvss-v3.0.json.license new file mode 100644 index 0000000..dd033e8 --- /dev/null +++ b/csaf/schema/cvss-v3.0.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. diff --git a/csaf/schema/cvss-v3.1.json.license b/csaf/schema/cvss-v3.1.json.license new file mode 100644 index 0000000..f87ced8 --- /dev/null +++ b/csaf/schema/cvss-v3.1.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2021 FIRST.ORG, INC. From 56fadc3a80f66d0006203e9983138c5171b07fbf Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 21 Jun 2024 14:04:20 +0200 Subject: [PATCH 022/176] docs: fix typo in examples/aggregator.toml (#539) --- docs/examples/aggregator.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index ae1723d..2161079 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -51,7 +51,7 @@ insecure = true # rate = 1.8 # insecure = true write_indices = true - # If aggregator.category == "aggreator", set for an entry that should + # If aggregator.category == "aggregator", set for an entry that should # be listed in addition: category = "lister" # ignore_pattern = [".*white.*", ".*red.*"] From 3084cdbc371f03adfe22c1640b53b43fed5a0563 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Fri, 21 Jun 2024 15:35:30 +0200 Subject: [PATCH 023/176] Address comments --- util/csv_test.go | 6 ++---- util/file_test.go | 54 ++++++++++++++++++++++------------------------- util/hash_test.go | 6 ++---- util/json_test.go | 49 ++++++++++++++++-------------------------- 4 files changed, 47 insertions(+), 68 deletions(-) diff --git a/util/csv_test.go b/util/csv_test.go index a744b75..575d83d 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ -17,15 +17,13 @@ func TestCSV(t *testing.T) { buf := new(bytes.Buffer) csvWriter := NewFullyQuotedCSWWriter(buf) for _, x := range [][]string{{"a", "b", "c"}, {"d", "e", "f"}} { - err := csvWriter.Write(x) - if err != nil { + if err := csvWriter.Write(x); err != nil { t.Error(err) } } csvWriter.Flush() - err := csvWriter.Error() - if err != nil { + if err := csvWriter.Error(); err != nil { t.Error(err) } for _, want := range []string{`"a","b","c"`, `"d","e","f"`} { diff --git a/util/file_test.go b/util/file_test.go index 320f3d4..28c5196 100644 --- 
a/util/file_test.go +++ b/util/file_test.go @@ -60,28 +60,25 @@ func TestConformingFileName(t *testing.T) { func TestIDMatchesFilename(t *testing.T) { pathEval := NewPathEval() - doc := make(map[string]interface{}) - doc["document"] = map[string]interface{}{ - "tracking": map[string]interface{}{ + doc := make(map[string]any) + doc["document"] = map[string]any{ + "tracking": map[string]any{ "id": "valid.json", }, } - err := IDMatchesFilename(pathEval, doc, "valid.json") - if err != nil { + if err := IDMatchesFilename(pathEval, doc, "valid.json"); err != nil { t.Errorf("IDMatchesFilename: Expected nil, got %q", err) } - err = IDMatchesFilename(pathEval, doc, "different_file_name.json") - if err == nil { + if err := IDMatchesFilename(pathEval, doc, "different_file_name.json"); err == nil { t.Error("IDMatchesFilename: Expected error, got nil") } - doc["document"] = map[string]interface{}{ - "tracking": map[string]interface{}{}, + doc["document"] = map[string]any{ + "tracking": map[string]any{}, } - err = IDMatchesFilename(pathEval, doc, "valid.json") - if err == nil { + if err := IDMatchesFilename(pathEval, doc, "valid.json"); err == nil { t.Error("IDMatchesFilename: Expected error, got nil") } } @@ -130,8 +127,7 @@ func TestNWriter(t *testing.T) { func TestWriteToFile(t *testing.T) { filename := filepath.Join(t.TempDir(), "test_file") wt := bytes.NewBufferString("test_data") - err := WriteToFile(filename, wt) - if err != nil { + if err := WriteToFile(filename, wt); err != nil { t.Error(err) } fileData, err := os.ReadFile(filename) @@ -149,12 +145,10 @@ func TestMakeUniqFile(t *testing.T) { if err != nil { t.Error(err) } - _, err = file.Write([]byte("test_data")) - if err != nil { + if _, err = file.Write([]byte("test_data")); err != nil { t.Error(err) } - err = file.Close() - if err != nil { + if err = file.Close(); err != nil { t.Error(err) } } @@ -185,16 +179,20 @@ func Test_mkUniq(t *testing.T) { func TestDeepCopy(t *testing.T) { dir := t.TempDir() - os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755) - os.MkdirAll(filepath.Join(dir, "dst"), 0755) - os.MkdirAll(filepath.Join(dir, "dst1"), 0755) - err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755) - if err != nil { - t.Error(err) + if err := os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(filepath.Join(dir, "dst"), 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(filepath.Join(dir, "dst1"), 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755); err != nil { + t.Fatal(err) } - err = DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")) - if err != nil { + if err := DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")); err != nil { t.Error(err) } @@ -207,13 +205,11 @@ func TestDeepCopy(t *testing.T) { t.Errorf("DeepCopy: Expected test_data, got %v", fileData) } - err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")) - if err == nil { + if err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")); err == nil { t.Error("DeepCopy: Expected error, got nil") } - err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist") - if err == nil { + if err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist"); err == nil { t.Error("DeepCopy: Expected error, got nil") } } diff --git a/util/hash_test.go b/util/hash_test.go index ed0f0b2..d690891 100644 --- a/util/hash_test.go +++ b/util/hash_test.go @@ -64,8 
+64,7 @@ func TestWriteHashToFile(t *testing.T) { nameArg := "name" want := "deadbeef " + nameArg + "\n" - err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}) - if err != nil { + if err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}); err != nil { t.Error(err) } testFile, err := os.Open(filePath) @@ -90,8 +89,7 @@ func TestWriteHashSumToFile(t *testing.T) { nameArg := "name" want := "deadbeef " + nameArg + "\n" - err := WriteHashSumToFile(filePath, nameArg, sum) - if err != nil { + if err := WriteHashSumToFile(filePath, nameArg, sum); err != nil { t.Error(err) } testFile, err := os.Open(filePath) diff --git a/util/json_test.go b/util/json_test.go index 452fabe..ba18171 100644 --- a/util/json_test.go +++ b/util/json_test.go @@ -31,7 +31,7 @@ func TestPathEval_Compile(t *testing.T) { t.Error("PathEval_Compile: Expected cached eval") } - got, err := eval.EvalInt(context.Background(), map[string]interface{}{"foo": 5}) + got, err := eval.EvalInt(context.Background(), map[string]any{"foo": 5}) if err != nil { t.Error(err) } @@ -46,7 +46,7 @@ func TestPathEval_Eval(t *testing.T) { if err == nil { t.Error("PathEval_Eval: Expected error, got nil") } - got, err := pathEval.Eval("foo", map[string]interface{}{"foo": 5}) + got, err := pathEval.Eval("foo", map[string]any{"foo": 5}) if err != nil { t.Error(err) } @@ -59,8 +59,7 @@ func TestReMarshalMatcher(t *testing.T) { var intDst int var uintSrc uint = 2 remarshalFunc := ReMarshalMatcher(&intDst) - err := remarshalFunc(uintSrc) - if err != nil { + if err := remarshalFunc(uintSrc); err != nil { t.Error(err) } if intDst != 2 { @@ -71,8 +70,7 @@ func TestReMarshalMatcher(t *testing.T) { func TestBoolMatcher(t *testing.T) { var boolDst bool boolFunc := BoolMatcher(&boolDst) - err := boolFunc(true) - if err != nil { + if err := boolFunc(true); err != nil { t.Error(err) } @@ -80,8 +78,7 @@ func TestBoolMatcher(t *testing.T) { t.Error("BoolMatcher: Expected true got false") } - err = boolFunc(1) - if err == nil { + if err := boolFunc(1); err == nil { t.Error("BoolMatcher: Expected error, got nil") } } @@ -89,8 +86,7 @@ func TestBoolMatcher(t *testing.T) { func TestStringMatcher(t *testing.T) { var stringDst string stringFunc := StringMatcher(&stringDst) - err := stringFunc("test") - if err != nil { + if err := stringFunc("test"); err != nil { t.Error(err) } @@ -98,8 +94,7 @@ func TestStringMatcher(t *testing.T) { t.Errorf("StringMatcher: Expected test, got %v", stringDst) } - err = stringFunc(1) - if err == nil { + if err := stringFunc(1); err == nil { t.Error("StringMatcher: Expected error, got nil") } } @@ -107,8 +102,7 @@ func TestStringMatcher(t *testing.T) { func TestStringTreeMatcher(t *testing.T) { var stringTreeDst []string stringTreeFunc := StringTreeMatcher(&stringTreeDst) - err := stringTreeFunc([]any{"a", "a", "b"}) - if err != nil { + if err := stringTreeFunc([]any{"a", "a", "b"}); err != nil { t.Error(err) } @@ -117,13 +111,11 @@ func TestStringTreeMatcher(t *testing.T) { t.Errorf("StringTreeMatcher: Expected %v, got %v", wantAnySlice, stringTreeDst) } - err = stringTreeFunc([]string{"a", "a", "b"}) - if err == nil { + if err := stringTreeFunc([]string{"a", "a", "b"}); err == nil { t.Error("StringTreeMatcher: Expected error, got nil") } - err = stringTreeFunc(1) - if err == nil { + if err := stringTreeFunc(1); err == nil { t.Error("StringTreeMatcher: Expected error, got nil") } } @@ -131,8 +123,7 @@ func TestStringTreeMatcher(t *testing.T) { func TestTimeMatcher(t *testing.T) { var timeDst time.Time timeFunc := 
TimeMatcher(&timeDst, time.RFC3339) - err := timeFunc("2024-03-18T12:57:48.236Z") - if err != nil { + if err := timeFunc("2024-03-18T12:57:48.236Z"); err != nil { t.Error(err) } wantTime := time.Date(2024, time.March, 18, 12, 57, 48, 236_000_000, time.UTC) @@ -140,13 +131,11 @@ func TestTimeMatcher(t *testing.T) { t.Errorf("TimeMatcher: Expected %v, got %v", wantTime, timeDst) } - err = timeFunc("") - if err == nil { + if err := timeFunc(""); err == nil { t.Error("TimeMatcher: Expected error, got nil") } - err = timeFunc(1) - if err == nil { + if err := timeFunc(1); err == nil { t.Error("TimeMatcher: Expected error, got nil") } } @@ -155,8 +144,7 @@ func TestPathEval_Extract(t *testing.T) { pathEval := NewPathEval() var result string matcher := StringMatcher(&result) - err := pathEval.Extract("foo", matcher, true, map[string]interface{}{"foo": "bar"}) - if err != nil { + if err := pathEval.Extract("foo", matcher, true, map[string]any{"foo": "bar"}); err != nil { t.Error(err) } if result != "bar" { @@ -166,13 +154,12 @@ func TestPathEval_Extract(t *testing.T) { func TestPathEval_Match(t *testing.T) { var got string - doc := map[string]interface{}{"foo": "bar"} + doc := map[string]any{"foo": "bar"} pe := NewPathEval() pem := PathEvalMatcher{Expr: "foo", Action: StringMatcher(&got)} - err := pe.Match([]PathEvalMatcher{pem}, doc) - if err != nil { + if err := pe.Match([]PathEvalMatcher{pem}, doc); err != nil { t.Error(err) } if got != "bar" { @@ -182,7 +169,7 @@ func TestPathEval_Match(t *testing.T) { func TestPathEval_Strings(t *testing.T) { pe := NewPathEval() - doc := map[string]interface{}{"foo": "bar"} + doc := map[string]any{"foo": "bar"} want := []string{"bar"} got, err := pe.Strings([]string{"foo"}, true, doc) @@ -196,7 +183,7 @@ func TestPathEval_Strings(t *testing.T) { } func TestAsStrings(t *testing.T) { - arg := []interface{}{"foo", "bar"} + arg := []any{"foo", "bar"} want := []string{"foo", "bar"} got, valid := AsStrings(arg) From 5c6736b178b113f6abc2cad6efd9301d5fbbe18e Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 24 Jun 2024 11:57:38 +0200 Subject: [PATCH 024/176] Remove data races in downloader caused by shared use of json path eval. (#547) --- cmd/csaf_downloader/downloader.go | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index c5c3e02..a0cf34e 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -40,7 +40,6 @@ import ( type downloader struct { cfg *config keys *crypto.KeyRing - eval *util.PathEval validator csaf.RemoteValidator forwarder *forwarder mkdirMu sync.Mutex @@ -73,7 +72,6 @@ func newDownloader(cfg *config) (*downloader, error) { return &downloader{ cfg: cfg, - eval: util.NewPathEval(), validator: validator, }, nil } @@ -218,17 +216,20 @@ func (d *downloader) download(ctx context.Context, domain string) error { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } + expr := util.NewPathEval() + if err := d.loadOpenPGPKeys( client, lpmd.Document, base, + expr, ); err != nil { return err } afp := csaf.NewAdvisoryFileProcessor( client, - d.eval, + expr, lpmd.Document, base) @@ -297,9 +298,10 @@ func (d *downloader) loadOpenPGPKeys( client util.Client, doc any, base *url.URL, + expr *util.PathEval, ) error { - src, err := d.eval.Eval("$.public_openpgp_keys", doc) + src, err := expr.Eval("$.public_openpgp_keys", doc) if err != nil { // no keys. 
return nil @@ -421,6 +423,7 @@ func (d *downloader) downloadWorker( dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339) lower = strings.ToLower(string(label)) stats = stats{} + expr = util.NewPathEval() ) // Add collected stats back to total. @@ -588,7 +591,7 @@ nextAdvisory: // Validate if filename is conforming. filenameCheck := func() error { - if err := util.IDMatchesFilename(d.eval, doc, filename); err != nil { + if err := util.IDMatchesFilename(expr, doc, filename); err != nil { stats.filenameFailed++ return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) } @@ -651,7 +654,7 @@ nextAdvisory: continue } - if err := d.eval.Extract( + if err := expr.Extract( `$.document.tracking.initial_release_date`, dateExtract, false, doc, ); err != nil { slog.Warn("Cannot extract initial_release_date from advisory", From a46c286cf482451e8f395d367ef8ad3c705cdfd4 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Fri, 19 Apr 2024 14:04:12 +0200 Subject: [PATCH 025/176] fix: don't drop error messages from loading provider-metadata.json previously in case case of trying last resort dns, all other error messages were dropped --- csaf/providermetaloader.go | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 203f2b3..0c4fc3b 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -173,6 +173,8 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata // We have a candidate. if wellknownResult.Valid() { wellknownGood = wellknownResult + } else { + pmdl.messages.AppendUnique(wellknownResult.Messages) } // Next load the PMDs from security.txt @@ -220,25 +222,28 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } } // Take the good well-known. - wellknownGood.Messages.AppendUnique(pmdl.messages) + wellknownGood.Messages = pmdl.messages return wellknownGood } // Don't have well-known. Take first good from security.txt. ignoreExtras() - secGoods[0].Messages.AppendUnique(pmdl.messages) + secGoods[0].Messages = pmdl.messages return secGoods[0] } // If we have a good well-known take it. if wellknownGood != nil { - wellknownGood.Messages.AppendUnique(pmdl.messages) + wellknownGood.Messages = pmdl.messages return wellknownGood } // Last resort: fall back to DNS. dnsURL := "https://csaf.data.security." + domain - return pmdl.loadFromURL(dnsURL) + dnsURLResult := pmdl.loadFromURL(dnsURL) + pmdl.messages.AppendUnique(dnsURLResult.Messages) // keep order of messages consistent (i.e. last occurred message is last element) + dnsURLResult.Messages = pmdl.messages + return dnsURLResult } // loadFromSecurity loads the PMDs mentioned in the security.txt. From 51dc9b5bcb26c74bc3e46f3c9cf0e7d190cc41d1 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Fri, 19 Apr 2024 14:06:56 +0200 Subject: [PATCH 026/176] refactor: deduplicate filtering pmd results from security.txt already done in `loadFromSecurity` --- csaf/providermetaloader.go | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 0c4fc3b..b21ddc6 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -178,20 +178,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } // Next load the PMDs from security.txt - secResults := pmdl.loadFromSecurity(domain) - - // Filter out the results which are valid. 
- var secGoods []*LoadedProviderMetadata - - for _, result := range secResults { - if len(result.Messages) > 0 { - // If there where validation issues append them - // to the overall report - pmdl.messages.AppendUnique(pmdl.messages) - } else { - secGoods = append(secGoods, result) - } - } + secGoods := pmdl.loadFromSecurity(domain) // Mention extra CSAF entries in security.txt. ignoreExtras := func() { @@ -246,7 +233,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata return dnsURLResult } -// loadFromSecurity loads the PMDs mentioned in the security.txt. +// loadFromSecurity loads the PMDs mentioned in the security.txt. Only valid PMDs are returned. func (pmdl *ProviderMetadataLoader) loadFromSecurity(domain string) []*LoadedProviderMetadata { // If .well-known fails try legacy location. From 1e531de82d35ab549fa4b07f828f21a38554c3a5 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Mon, 15 Jul 2024 10:52:13 +0200 Subject: [PATCH 027/176] fix: don't require debug level to print error details on failed loading of provider metadata json --- cmd/csaf_aggregator/processor.go | 14 +++++++++----- cmd/csaf_downloader/downloader.go | 13 ++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index 9f10a77..5cb3628 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -89,17 +89,21 @@ func (w *worker) locateProviderMetadata(domain string) error { lpmd := loader.Load(domain) - if w.processor.cfg.Verbose { + if !lpmd.Valid() { for i := range lpmd.Messages { - w.log.Info( + w.log.Error( "Loading provider-metadata.json", "domain", domain, "message", lpmd.Messages[i].Message) } - } - - if !lpmd.Valid() { return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) + } else if w.processor.cfg.Verbose { + for i := range lpmd.Messages { + w.log.Debug( + "Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) + } } w.metadataProvider = lpmd.Document diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index a0cf34e..e370f55 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -199,7 +199,14 @@ func (d *downloader) download(ctx context.Context, domain string) error { lpmd := loader.Load(domain) - if d.cfg.verbose() { + if !lpmd.Valid() { + for i := range lpmd.Messages { + slog.Error("Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) + } + return fmt.Errorf("no valid provider-metadata.json found for '%s': ", domain) + } else if d.cfg.verbose() { for i := range lpmd.Messages { slog.Debug("Loading provider-metadata.json", "domain", domain, @@ -207,10 +214,6 @@ func (d *downloader) download(ctx context.Context, domain string) error { } } - if !lpmd.Valid() { - return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) - } - base, err := url.Parse(lpmd.URL) if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) From bcf4d2f64aa267efe0e4cbf1a844d130fb708d23 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Tue, 16 Jul 2024 12:00:09 +0200 Subject: [PATCH 028/176] fix error message The error message had a trailing `:` which suggest that there are some details which were truncated. However the details are already printed before in the log. 
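In other words, the check in cmd/csaf_downloader/downloader.go ends up roughly as follows (a condensed fragment of the committed code, not a standalone program; it assumes the surrounding download method where lpmd and domain are in scope and log/slog and fmt are imported):

    if !lpmd.Valid() {
            // Error details are logged unconditionally, no debug level required.
            for i := range lpmd.Messages {
                    slog.Error("Loading provider-metadata.json",
                            "domain", domain,
                            "message", lpmd.Messages[i].Message)
            }
            // No trailing ": " on the returned error; the details are already in the log above.
            return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain)
    }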
--- cmd/csaf_downloader/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index e370f55..badf060 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -205,7 +205,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { "domain", domain, "message", lpmd.Messages[i].Message) } - return fmt.Errorf("no valid provider-metadata.json found for '%s': ", domain) + return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) } else if d.cfg.verbose() { for i := range lpmd.Messages { slog.Debug("Loading provider-metadata.json", From a131b0fb4bc97592d8ac4d80280706359b2a6811 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:36:54 +0200 Subject: [PATCH 029/176] Improve SHA* marking --- cmd/csaf_checker/processor.go | 45 ++++++++++++++--- cmd/csaf_downloader/downloader.go | 34 ++++++++----- csaf/advisories.go | 83 +++++++++++++++---------------- 3 files changed, 99 insertions(+), 63 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..de42e18 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -20,6 +20,7 @@ import ( "fmt" "io" "log" + "log/slog" "net/http" "net/url" "path/filepath" @@ -138,7 +139,7 @@ func (m *topicMessages) info(format string, args ...any) { m.add(InfoType, format, args...) } -// use signals that we going to use this topic. +// use signals that we're going to use this topic. func (m *topicMessages) use() { if *m == nil { *m = []Message{} @@ -164,7 +165,7 @@ func (m *topicMessages) hasErrors() bool { return false } -// newProcessor returns an initilaized processor. +// newProcessor returns an initialized processor. func newProcessor(cfg *config) (*processor, error) { var validator csaf.RemoteValidator @@ -594,10 +595,15 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var file csaf.AdvisoryFile - if sha256 != "" || sha512 != "" || sign != "" { - file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign} - } else { - file = csaf.PlainAdvisoryFile(url) + switch { + case sha256 == "" && sha512 == "": + slog.Error("No hash listed on ROLIE feed", "file", url) + return + case sign == "": + slog.Error("No signature listed on ROLIE feed", "file", url) + return + default: + file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} } files = append(files, file) @@ -888,7 +894,16 @@ func (p *processor) checkIndex(base string, mask whereType) error { p.badIntegrities.error("index.txt contains invalid URL %q in line %d", u, line) continue } - files = append(files, csaf.PlainAdvisoryFile(u)) + + SHA256 := p.checkURL(u + ".sha256") + SHA512 := p.checkURL(u + ".sha512") + sign := p.checkURL(u + ".asc") + files = append(files, csaf.PlainAdvisoryFile{ + Path: u, + SHA256: SHA256, + SHA512: SHA512, + Sign: sign, + }) } return files, scanner.Err() }() @@ -906,6 +921,15 @@ func (p *processor) checkIndex(base string, mask whereType) error { return p.integrity(files, base, mask, p.badIndices.add) } +// checkURL returns the URL if it is accessible. +func (p *processor) checkURL(url string) string { + _, err := p.client.Head(url) + if err != nil { + return url + } + return "" +} + // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. 
// It extracts the file content, tests the column number and the validity of the time format // of the fields' values and if they are sorted properly. Then it passes the files to the @@ -970,9 +994,14 @@ func (p *processor) checkChanges(base string, mask whereType) error { continue } path := r[pathColumn] + + SHA256 := p.checkURL(path + ".sha256") + SHA512 := p.checkURL(path + ".sha512") + sign := p.checkURL(path + ".asc") + times, files = append(times, t), - append(files, csaf.PlainAdvisoryFile(path)) + append(files, csaf.PlainAdvisoryFile{Path: path, SHA256: SHA256, SHA512: SHA512, Sign: sign}) } return times, files, nil }() diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..025ed65 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -501,23 +501,31 @@ nextAdvisory: signData []byte ) - // Only hash when we have a remote counter part we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) + if file.SHA256URL() == "" { + slog.Info("SHA256 not present", "file", file.URL()) } else { - s256 = sha256.New() - writers = append(writers, s256) + // Only hash when we have a remote counterpart we can compare it with. + if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + slog.Warn("Cannot fetch SHA256", + "url", file.SHA256URL(), + "error", err) + } else { + s256 = sha256.New() + writers = append(writers, s256) + } } - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) + if file.SHA512URL() == "" { + slog.Info("SHA512 not present", "file", file.URL()) } else { - s512 = sha512.New() - writers = append(writers, s512) + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + slog.Warn("Cannot fetch SHA512", + "url", file.SHA512URL(), + "error", err) + } else { + s512 = sha512.New() + writers = append(writers, s512) + } } // Remember the data as we need to store it to file later. diff --git a/csaf/advisories.go b/csaf/advisories.go index 6f07648..4aa7f52 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -34,55 +34,30 @@ type AdvisoryFile interface { // PlainAdvisoryFile is a simple implementation of checkFile. // The hash and signature files are directly constructed by extending // the file name. -type PlainAdvisoryFile string +type PlainAdvisoryFile struct { + Path string + SHA256 string + SHA512 string + Sign string +} // URL returns the URL of this advisory. -func (paf PlainAdvisoryFile) URL() string { return string(paf) } +func (paf PlainAdvisoryFile) URL() string { return paf.Path } // SHA256URL returns the URL of SHA256 hash file of this advisory. -func (paf PlainAdvisoryFile) SHA256URL() string { return string(paf) + ".sha256" } +func (paf PlainAdvisoryFile) SHA256URL() string { return paf.SHA256 } // SHA512URL returns the URL of SHA512 hash file of this advisory. -func (paf PlainAdvisoryFile) SHA512URL() string { return string(paf) + ".sha512" } +func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 } // SignURL returns the URL of signature file of this advisory. 
-func (paf PlainAdvisoryFile) SignURL() string { return string(paf) + ".asc" } +func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign } // LogValue implements [slog.LogValuer] func (paf PlainAdvisoryFile) LogValue() slog.Value { return slog.GroupValue(slog.String("url", paf.URL())) } -// HashedAdvisoryFile is a more involed version of checkFile. -// Here each component can be given explicitly. -// If a component is not given it is constructed by -// extending the first component. -type HashedAdvisoryFile [4]string - -func (haf HashedAdvisoryFile) name(i int, ext string) string { - if haf[i] != "" { - return haf[i] - } - return haf[0] + ext -} - -// URL returns the URL of this advisory. -func (haf HashedAdvisoryFile) URL() string { return haf[0] } - -// SHA256URL returns the URL of SHA256 hash file of this advisory. -func (haf HashedAdvisoryFile) SHA256URL() string { return haf.name(1, ".sha256") } - -// SHA512URL returns the URL of SHA512 hash file of this advisory. -func (haf HashedAdvisoryFile) SHA512URL() string { return haf.name(2, ".sha512") } - -// SignURL returns the URL of signature file of this advisory. -func (haf HashedAdvisoryFile) SignURL() string { return haf.name(3, ".asc") } - -// LogValue implements [slog.LogValuer] -func (haf HashedAdvisoryFile) LogValue() slog.Value { - return slog.GroupValue(slog.String("url", haf.URL())) -} - // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { @@ -120,7 +95,7 @@ func empty(arr []string) bool { return true } -// Process extracts the adivisory filenames and passes them with +// Process extracts the advisory filenames and passes them with // the corresponding label to fn. func (afp *AdvisoryFileProcessor) Process( fn func(TLPLabel, []AdvisoryFile) error, @@ -201,6 +176,15 @@ func (afp *AdvisoryFileProcessor) Process( return nil } +// checkURL returns the URL if it is accessible. +func (afp *AdvisoryFileProcessor) checkURL(url string) string { + _, err := afp.client.Head(url) + if err != nil { + return url + } + return "" +} + // loadChanges loads baseURL/changes.csv and returns a list of files // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( @@ -257,8 +241,19 @@ func (afp *AdvisoryFileProcessor) loadChanges( lg("%q contains an invalid URL %q in line %d", changesURL, path, line) continue } + + self := base.JoinPath(path).String() + sha256 := afp.checkURL(self + ".sha256") + sha512 := afp.checkURL(self + ".sha512") + sign := afp.checkURL(self + ".asc") + files = append(files, - PlainAdvisoryFile(base.JoinPath(path).String())) + PlainAdvisoryFile{ + Path: path, + SHA256: sha256, + SHA512: sha512, + Sign: sign, + }) } return files, nil } @@ -325,7 +320,6 @@ func (afp *AdvisoryFileProcessor) processROLIE( } rfeed.Entries(func(entry *Entry) { - // Filter if we have date checking. 
if afp.AgeAccept != nil { if t := time.Time(entry.Updated); !t.IsZero() && !afp.AgeAccept(t) { @@ -359,10 +353,15 @@ func (afp *AdvisoryFileProcessor) processROLIE( var file AdvisoryFile - if sha256 != "" || sha512 != "" || sign != "" { - file = HashedAdvisoryFile{self, sha256, sha512, sign} - } else { - file = PlainAdvisoryFile(self) + switch { + case sha256 == "" && sha512 == "": + slog.Error("No hash listed on ROLIE feed", "file", self) + return + case sign == "": + slog.Error("No signature listed on ROLIE feed", "file", self) + return + default: + file = PlainAdvisoryFile{self, sha256, sha512, sign} } files = append(files, file) From 0ab851a87428ddce7a55a335bd0d58e8dc541e73 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 25 Jul 2024 13:26:29 +0200 Subject: [PATCH 030/176] Use a default user agent --- cmd/csaf_aggregator/config.go | 5 +++++ cmd/csaf_checker/processor.go | 8 +++----- cmd/csaf_downloader/downloader.go | 8 +++----- cmd/csaf_downloader/forwarder.go | 8 +++----- util/client.go | 5 +++++ 5 files changed, 19 insertions(+), 15 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 9808542..b73286c 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -301,6 +301,11 @@ func (c *config) httpClient(p *provider) util.Client { Client: client, Header: c.ExtraHeader, } + default: + client = &util.HeaderClient{ + Client: client, + Header: http.Header{}, + } } if c.Verbose { diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..49e815c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -435,11 +435,9 @@ func (p *processor) fullClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(p.cfg.ExtraHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: p.cfg.ExtraHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: p.cfg.ExtraHeader, } // Add optional URL logging. diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..9cef294 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -126,11 +126,9 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(d.cfg.ExtraHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: d.cfg.ExtraHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: d.cfg.ExtraHeader, } // Add optional URL logging. diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 3b1435a..c3681eb 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -111,11 +111,9 @@ func (f *forwarder) httpClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(f.cfg.ForwardHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: f.cfg.ForwardHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: f.cfg.ForwardHeader, } // Add optional URL logging. 
diff --git a/util/client.go b/util/client.go index 5a11c7b..441aaaa 100644 --- a/util/client.go +++ b/util/client.go @@ -61,6 +61,11 @@ func (hc *HeaderClient) Do(req *http.Request) (*http.Response, error) { req.Header.Add(key, v) } } + + // Use default user agent if none is set + if userAgent := hc.Header.Get("User-Agent"); userAgent == "" { + req.Header.Add("User-Agent", "csaf-distribution/"+SemVersion) + } return hc.Client.Do(req) } From 3a67fb52100dac0ca64719899afb431fbb8bd590 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:00:40 +0200 Subject: [PATCH 031/176] Add user-agent documentation --- docs/csaf_checker.md | 13 +++++++++---- docs/csaf_downloader.md | 13 +++++++++++-- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 58f77ca..a5bc0bf 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -30,9 +30,12 @@ Help Options: Will check all given _domains_, by trying each as a CSAF provider. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. + If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there. If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/checker.toml ~/.csaf_checker.toml @@ -41,6 +44,7 @@ csaf_checker.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Supported options in config files: + ``` output = "" format = "json" @@ -58,9 +62,10 @@ validator_preset = ["mandatory"] ``` Usage example: -` ./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html` +`./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html` Each performed check has a return type of either 0,1 or 2: + ``` type 0: success type 1: warning @@ -70,16 +75,16 @@ type 2: error The checker result is a success if no checks resulted in type 2, and a failure otherwise. The option `timerange` allows to only check advisories from a given time -interval. It can only be given once. See the +interval. It can only be given once. See the [downloader documentation](csaf_downloader.md#timerange-option) for details. - You can ignore certain advisories while checking by specifying a list of regular expressions[^1] to match their URLs by using the `ignorepattern` option. E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain the sub strings **white** or **red**. In the config file this has to be noted as: + ``` ignorepattern = [".*white.*", ".*red.*"] ``` @@ -88,7 +93,7 @@ ignorepattern = [".*white.*", ".*red.*"] The `role` given in the `provider-metadata.json` is not yet considered to change the overall result, -see https://github.com/csaf-poc/csaf_distribution/issues/221 . +see . If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected. To check these advisories, authorization can be given via custom headers or certificates. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index fcf6634..2831cb4 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -1,4 +1,5 @@ ## csaf_downloader + A tool to download CSAF documents from CSAF providers. 
### Usage @@ -39,6 +40,8 @@ Help Options: Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. + If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading procedes from there. Increasing the number of workers opens more connections to the web servers @@ -47,6 +50,7 @@ However, since this also increases the load on the servers, their administrators have taken countermeasures to limit this. If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/downloader.toml ~/.csaf_downloader.toml @@ -56,6 +60,7 @@ csaf_downloader.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Supported options in config files: + ``` # directory # not set by default insecure = false @@ -90,6 +95,7 @@ option. E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain the sub strings **white** or **red**. In the config file this has to be noted as: + ``` ignorepattern = [".*white.*", ".*red.*"] ``` @@ -106,16 +112,18 @@ into a given intervall. There are three possible notations: and 'y' for years are recognized. In these cases only integer values are accepted without any fractions. Some examples: + - `"3h"` means downloading the advisories that have changed in the last three hours. - - `"30m"` .. changed within the last thirty minutes. + - `"30m"` .. changed within the last thirty minutes. - `"3M2m"` .. changed within the last three months and two minutes. - - `"2y"` .. changed within the last two years. + - `"2y"` .. changed within the last two years. 2. Absolute. If the given string is an RFC 3339 date timestamp the time interval between this date and now is used. E.g. `"2006-01-02"` means that all files between 2006 January 2nd and now going to being downloaded. Accepted patterns are: + - `"2006-01-02T15:04:05Z"` - `"2006-01-02T15:04:05+07:00"` - `"2006-01-02T15:04:05-07:00"` @@ -134,6 +142,7 @@ into a given intervall. There are three possible notations: All interval boundaries are inclusive. #### Forwarding + The downloader is able to forward downloaded advisories and their checksums, OpenPGP signatures and validation results to an HTTP endpoint. The details of the implemented API are described [here](https://github.com/mfd2007/csaf_upload_interface). 
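The relative `timerange` notation documented in the patch above is compact enough that a rough reimplementation fits in a few lines. The following standalone sketch shows one way the relative form (`"3h"`, `"30m"`, `"3M2m"`, `"2y"`) could be interpreted as an interval reaching back from now; it is an illustration under the assumptions stated in the comments, not the downloader's actual parser.

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
	"time"
)

// parseRelativeRange interprets strings like "3h", "30m" or "3M2m" as a time
// interval reaching back from now, using the unit letters documented above
// (m=minutes, h=hours, d=days, w=weeks, M=months, y=years). It is a rough,
// self-contained illustration only; it does not reject trailing garbage.
func parseRelativeRange(s string, now time.Time) (time.Time, time.Time, error) {
	re := regexp.MustCompile(`([0-9]+)([mhdwMy])`)
	matches := re.FindAllStringSubmatch(s, -1)
	if len(matches) == 0 {
		return time.Time{}, time.Time{}, fmt.Errorf("invalid time range %q", s)
	}
	from := now
	for _, m := range matches {
		n, err := strconv.Atoi(m[1])
		if err != nil {
			return time.Time{}, time.Time{}, err
		}
		switch m[2] {
		case "m":
			from = from.Add(-time.Duration(n) * time.Minute)
		case "h":
			from = from.Add(-time.Duration(n) * time.Hour)
		case "d":
			from = from.AddDate(0, 0, -n)
		case "w":
			from = from.AddDate(0, 0, -7*n)
		case "M":
			from = from.AddDate(0, -n, 0)
		case "y":
			from = from.AddDate(-n, 0, 0)
		}
	}
	return from, now, nil
}

func main() {
	from, to, err := parseRelativeRange("3M2m", time.Now())
	if err != nil {
		panic(err)
	}
	fmt.Printf("accepting advisories changed between %s and %s\n",
		from.Format(time.RFC3339), to.Format(time.RFC3339))
}
```

The absolute and two-timestamp forms are simpler to handle, since Go's `time.Parse` deals with RFC 3339 timestamps directly.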
From be2e4e74242774d9e8bfb97f13886d9c4fa6e241 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:42:45 +0200 Subject: [PATCH 032/176] Improve hash path handling of directory feeds --- cmd/csaf_checker/processor.go | 25 ++----------- cmd/csaf_downloader/downloader.go | 29 +++++++++------- csaf/advisories.go | 58 ++++++++++++++++++------------- 3 files changed, 52 insertions(+), 60 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index de42e18..38f3e34 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -895,15 +895,7 @@ func (p *processor) checkIndex(base string, mask whereType) error { continue } - SHA256 := p.checkURL(u + ".sha256") - SHA512 := p.checkURL(u + ".sha512") - sign := p.checkURL(u + ".asc") - files = append(files, csaf.PlainAdvisoryFile{ - Path: u, - SHA256: SHA256, - SHA512: SHA512, - Sign: sign, - }) + files = append(files, csaf.DirectoryAdvisoryFile{Path: u}) } return files, scanner.Err() }() @@ -921,15 +913,6 @@ func (p *processor) checkIndex(base string, mask whereType) error { return p.integrity(files, base, mask, p.badIndices.add) } -// checkURL returns the URL if it is accessible. -func (p *processor) checkURL(url string) string { - _, err := p.client.Head(url) - if err != nil { - return url - } - return "" -} - // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. // It extracts the file content, tests the column number and the validity of the time format // of the fields' values and if they are sorted properly. Then it passes the files to the @@ -995,13 +978,9 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - SHA256 := p.checkURL(path + ".sha256") - SHA512 := p.checkURL(path + ".sha512") - sign := p.checkURL(path + ".asc") - times, files = append(times, t), - append(files, csaf.PlainAdvisoryFile{Path: path, SHA256: SHA256, SHA512: SHA512, Sign: sign}) + append(files, csaf.DirectoryAdvisoryFile{Path: path}) } return times, files, nil }() diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 025ed65..3bf3647 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -501,31 +501,31 @@ nextAdvisory: signData []byte ) - if file.SHA256URL() == "" { - slog.Info("SHA256 not present", "file", file.URL()) - } else { - // Only hash when we have a remote counterpart we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + // Only hash when we have a remote counterpart we can compare it with. 
+ if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + if !file.IsDirectory() { slog.Warn("Cannot fetch SHA256", "url", file.SHA256URL(), "error", err) } else { - s256 = sha256.New() - writers = append(writers, s256) + slog.Info("SHA256 not present", "file", file.URL()) } + } else { + s256 = sha256.New() + writers = append(writers, s256) } - if file.SHA512URL() == "" { - slog.Info("SHA512 not present", "file", file.URL()) - } else { - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if !file.IsDirectory() { slog.Warn("Cannot fetch SHA512", "url", file.SHA512URL(), "error", err) } else { - s512 = sha512.New() - writers = append(writers, s512) + slog.Info("SHA512 not present", "file", file.URL()) } + } else { + s512 = sha512.New() + writers = append(writers, s512) } // Remember the data as we need to store it to file later. @@ -757,6 +757,9 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, } func loadHash(client util.Client, p string) ([]byte, []byte, error) { + if p == "" { + return nil, nil, fmt.Errorf("no hash path provided") + } resp, err := client.Get(p) if err != nil { return nil, nil, err diff --git a/csaf/advisories.go b/csaf/advisories.go index 4aa7f52..d05331c 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -29,11 +29,10 @@ type AdvisoryFile interface { SHA256URL() string SHA512URL() string SignURL() string + IsDirectory() bool } -// PlainAdvisoryFile is a simple implementation of checkFile. -// The hash and signature files are directly constructed by extending -// the file name. +// PlainAdvisoryFile contains all relevant urls of a remote file. type PlainAdvisoryFile struct { Path string SHA256 string @@ -53,11 +52,41 @@ func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 } // SignURL returns the URL of signature file of this advisory. func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign } +// IsDirectory returns true, if was fetched via directory feeds. +func (paf PlainAdvisoryFile) IsDirectory() bool { return false } + // LogValue implements [slog.LogValuer] func (paf PlainAdvisoryFile) LogValue() slog.Value { return slog.GroupValue(slog.String("url", paf.URL())) } +// DirectoryAdvisoryFile only contains the base file path. +// The hash and signature files are directly constructed by extending +// the file name. +type DirectoryAdvisoryFile struct { + Path string +} + +// URL returns the URL of this advisory. +func (daf DirectoryAdvisoryFile) URL() string { return daf.Path } + +// SHA256URL returns the URL of SHA256 hash file of this advisory. +func (daf DirectoryAdvisoryFile) SHA256URL() string { return daf.Path + ".sha256" } + +// SHA512URL returns the URL of SHA512 hash file of this advisory. +func (daf DirectoryAdvisoryFile) SHA512URL() string { return daf.Path + ".sha512" } + +// SignURL returns the URL of signature file of this advisory. +func (daf DirectoryAdvisoryFile) SignURL() string { return daf.Path + ".asc" } + +// IsDirectory returns true, if was fetched via directory feeds. +func (daf DirectoryAdvisoryFile) IsDirectory() bool { return true } + +// LogValue implements [slog.LogValuer] +func (daf DirectoryAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", daf.URL())) +} + // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. 
type AdvisoryFileProcessor struct { @@ -69,7 +98,7 @@ type AdvisoryFileProcessor struct { base *url.URL } -// NewAdvisoryFileProcessor constructs an filename extractor +// NewAdvisoryFileProcessor constructs a filename extractor // for a given metadata document. func NewAdvisoryFileProcessor( client util.Client, @@ -176,15 +205,6 @@ func (afp *AdvisoryFileProcessor) Process( return nil } -// checkURL returns the URL if it is accessible. -func (afp *AdvisoryFileProcessor) checkURL(url string) string { - _, err := afp.client.Head(url) - if err != nil { - return url - } - return "" -} - // loadChanges loads baseURL/changes.csv and returns a list of files // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( @@ -242,18 +262,8 @@ func (afp *AdvisoryFileProcessor) loadChanges( continue } - self := base.JoinPath(path).String() - sha256 := afp.checkURL(self + ".sha256") - sha512 := afp.checkURL(self + ".sha512") - sign := afp.checkURL(self + ".asc") - files = append(files, - PlainAdvisoryFile{ - Path: path, - SHA256: sha256, - SHA512: sha512, - Sign: sign, - }) + DirectoryAdvisoryFile{Path: base.JoinPath(path).String()}) } return files, nil } From 1a2ce684ff94a0f47a4b9737698b1961b4aae91b Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 1 Aug 2024 14:53:23 +0200 Subject: [PATCH 033/176] improve default header * use `csaf_distribution` with an underscore as underscores are allowed by RFC9110 and it is more consistent as it is used with underscore at other places. * change example to `VERSION` to indicate that this is dynamic. --- docs/csaf_checker.md | 2 +- docs/csaf_downloader.md | 2 +- util/client.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index a5bc0bf..0b223b6 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -30,7 +30,7 @@ Help Options: Will check all given _domains_, by trying each as a CSAF provider. -If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent. If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 2831cb4..003ae4a 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -40,7 +40,7 @@ Help Options: Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider. -If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent. If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading procedes from there. 
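Before the `util/client.go` hunk that follows, here is a standalone sketch of what the documented behaviour means for a caller: a wrapping transport injects `csaf_distribution/VERSION` only when no User-Agent was set explicitly. The transport type, the `version` constant and the test server are illustrative stand-ins; this is not the repository's `HeaderClient`.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

// version stands in for the build-time semantic version of the tools.
const version = "3.0.0"

// defaultAgent is an illustrative RoundTripper: it sets a User-Agent of the
// form csaf_distribution/VERSION, but only if the caller did not set one.
type defaultAgent struct {
	next http.RoundTripper
}

func (t defaultAgent) RoundTrip(req *http.Request) (*http.Response, error) {
	if req.Header.Get("User-Agent") == "" {
		req = req.Clone(req.Context()) // do not mutate the caller's request
		req.Header.Set("User-Agent", "csaf_distribution/"+version)
	}
	return t.next.RoundTrip(req)
}

func main() {
	// Echo the User-Agent the server actually received.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "server saw User-Agent:", r.UserAgent())
	}))
	defer srv.Close()

	client := &http.Client{Transport: defaultAgent{next: http.DefaultTransport}}

	// No explicit header: the default agent is filled in.
	resp, err := client.Get(srv.URL)
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Print(string(body))

	// Explicit header, e.g. --header=user-agent:custom-agent/1.0: kept as-is.
	req, _ := http.NewRequest(http.MethodGet, srv.URL, nil)
	req.Header.Set("User-Agent", "custom-agent/1.0")
	resp, err = client.Do(req)
	if err != nil {
		panic(err)
	}
	body, _ = io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Print(string(body))
}
```

Running it prints the default agent for the first request and `custom-agent/1.0` for the second.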
diff --git a/util/client.go b/util/client.go index 441aaaa..b4478ca 100644 --- a/util/client.go +++ b/util/client.go @@ -64,7 +64,7 @@ func (hc *HeaderClient) Do(req *http.Request) (*http.Response, error) { // Use default user agent if none is set if userAgent := hc.Header.Get("User-Agent"); userAgent == "" { - req.Header.Add("User-Agent", "csaf-distribution/"+SemVersion) + req.Header.Add("User-Agent", "csaf_distribution/"+SemVersion) } return hc.Client.Do(req) } From 13a635c7e34c56e4ff39cbfc1ef1e2b6e7bd230a Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 1 Aug 2024 15:43:35 +0200 Subject: [PATCH 034/176] Add user-agent documentation to aggregator --- docs/csaf_aggregator.md | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 36cbe7e..661871c 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -16,6 +16,7 @@ Help Options: ``` If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/aggregator.toml ~/.csaf_aggregator.toml @@ -25,6 +26,7 @@ csaf_aggregator.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Usage example for a single run, to test if the config is good: + ```bash ./csaf_aggregator -c docs/examples/aggregator.toml ``` @@ -62,7 +64,6 @@ SHELL=/bin/bash 30 0-23 * * * $HOME/bin/csaf_aggregator --config /etc/csaf_aggregator.toml --interim >> /var/log/csaf_aggregator/interim.log 2>&1 ``` - #### serve via web server Serve the paths where the aggregator writes its `html/` output @@ -78,7 +79,6 @@ a template. For the aggregator the difference is that you can leave out the cgi-bin part, potentially commend out the TLS client parts and adjust the `root` path accordingly. - ### config options The config file is written in [TOML](https://toml.io/en/v1.0.0). @@ -118,10 +118,12 @@ Next we have two TOML _tables_: aggregator // basic infos for the aggregator object remote_validator // config for optional remote validation checker ``` + [See the provider config](csaf_provider.md#provider-options) about how to configure `remote_validator`. At last there is the TOML _array of tables_: + ``` providers // each entry to be mirrored or listed ``` @@ -148,6 +150,9 @@ header Where valid `name` and `domain` settings are required. +If no user agent is specified with `header = "user-agent:custom-agent/1.0"` +then the default agent in the form of `csaf_distribution/VERSION` is sent. + If you want an entry to be listed instead of mirrored in a `aggregator.category == "aggregator"` instance, set `category` to `lister` in the entry. @@ -165,15 +170,16 @@ To offer an easy way of assorting CSAF documents by criteria like document category, languages or values of the branch category within the product tree, ROLIE category values can be configured in `categories`. This can either -be done using an array of strings taken literally or, by prepending `"expr:"`. -The latter is evaluated as JSONPath and the result will be added into the +be done using an array of strings taken literally or, by prepending `"expr:"`. +The latter is evaluated as JSONPath and the result will be added into the categories document. For a more detailed explanation and examples, [refer to the provider config](csaf_provider.md#provider-options). 
- #### Example config file + + ```toml workers = 2 folder = "/var/csaf_aggregator" @@ -233,8 +239,8 @@ insecure = true category = "lister" # ignore_pattern = [".*white.*", ".*red.*"] ``` - + #### Publish others' advisories From 8feddc70e1c945e2cf2ec8cab92525aa8e89106d Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 23 Jul 2024 13:41:03 +0200 Subject: [PATCH 035/176] feat: no longer require to be root user to call setup scripts --- docs/scripts/Readme.md | 4 ++-- docs/scripts/setupValidationService.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index a3b932d..95f39b2 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -12,10 +12,10 @@ and configures nginx for serving TLS connections. As creating the folders needs to authenticate with the csaf_provider, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` -Calling example (as root): +Calling example (as user with sudo privileges): ``` bash curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh - bash prepareUbuntuInstanceForITests.sh + sudo bash prepareUbuntuInstanceForITests.sh git clone https://github.com/csaf-poc/csaf_distribution.git # --branch pushd csaf_distribution/docs/scripts/ diff --git a/docs/scripts/setupValidationService.sh b/docs/scripts/setupValidationService.sh index d6f8ba7..4a7dfd7 100755 --- a/docs/scripts/setupValidationService.sh +++ b/docs/scripts/setupValidationService.sh @@ -21,7 +21,7 @@ echo ' remote_validator= { "url" = "http://localhost:8082", "presets" = ["mandatory"], "cache" = "/var/lib/csaf/validations.db" } ' | sudo tee --append /etc/csaf/config.toml -npm install pm2 -g +sudo npm install pm2 -g pushd ~ git clone https://github.com/secvisogram/csaf-validator-service.git From 9037574d967da7ad80972edde4b74810c735e11c Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 8 Aug 2024 12:17:58 +0200 Subject: [PATCH 036/176] Improve PGP fingerprint handling Warn if no fingerprint is specified and give more details, if fingerprint comparison fails. Closes #555 --- cmd/csaf_checker/processor.go | 9 +++++++-- cmd/csaf_downloader/downloader.go | 7 ++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..b5f949e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1449,7 +1449,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { } // checkPGPKeys checks if the OpenPGP keys are available and valid, fetches -// the the remotely keys and compares the fingerprints. +// the remotely keys and compares the fingerprints. // As a result of these a respective error messages are passed to badPGP method // in case of errors. It returns nil if all checks are passed. 
func (p *processor) checkPGPKeys(_ string) error { @@ -1518,8 +1518,13 @@ func (p *processor) checkPGPKeys(_ string) error { continue } + if key.Fingerprint == "" { + p.badPGPs.warn("No fingerprint for public OpenPGP key found.") + continue + } + if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { - p.badPGPs.error("Fingerprint of public OpenPGP key %s does not match remotely loaded.", u) + p.badPGPs.error("Given Fingerprint (%q) of public OpenPGP key %q does not match remotely loaded (%q).", string(key.Fingerprint), u, ckey.GetFingerprint()) continue } if p.keys == nil { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..a5eeb71 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -366,10 +366,15 @@ func (d *downloader) loadOpenPGPKeys( continue } + if key.Fingerprint == "" { + slog.Warn("No fingerprint for public OpenPGP key found.") + continue + } + if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { slog.Warn( "Fingerprint of public OpenPGP key does not match remotely loaded", - "url", u) + "url", u, "fingerprint", key.Fingerprint, "remote-fingerprint", ckey.GetFingerprint()) continue } if d.keys == nil { From c2e24f7bbb1b49f5bcdd6163aad4b03e05398f31 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Sep 2024 18:18:37 +0200 Subject: [PATCH 037/176] Remove check for empty fingerprint The schema validation already catches this error and this check will never run. --- cmd/csaf_checker/processor.go | 5 ----- cmd/csaf_downloader/downloader.go | 5 ----- 2 files changed, 10 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index b5f949e..d05a9ec 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1518,11 +1518,6 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - if key.Fingerprint == "" { - p.badPGPs.warn("No fingerprint for public OpenPGP key found.") - continue - } - if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { p.badPGPs.error("Given Fingerprint (%q) of public OpenPGP key %q does not match remotely loaded (%q).", string(key.Fingerprint), u, ckey.GetFingerprint()) continue diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index a5eeb71..7e07449 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -366,11 +366,6 @@ func (d *downloader) loadOpenPGPKeys( continue } - if key.Fingerprint == "" { - slog.Warn("No fingerprint for public OpenPGP key found.") - continue - } - if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { slog.Warn( "Fingerprint of public OpenPGP key does not match remotely loaded", From 5231b3386b8126b248cc8cc9be451063caa17aab Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Sat, 7 Sep 2024 09:58:14 +0200 Subject: [PATCH 038/176] docs: improve code comment (minor) --- cmd/csaf_checker/processor.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index d05a9ec..c0034ca 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1449,9 +1449,9 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { } // checkPGPKeys checks if the OpenPGP keys are available and valid, fetches -// the remotely keys and compares the fingerprints. -// As a result of these a respective error messages are passed to badPGP method -// in case of errors. 
It returns nil if all checks are passed. +// the remote pubkeys and compares the fingerprints. +// As a result of these checks respective error messages are passed +// to badPGP methods. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { p.badPGPs.use() From 37c9eaf3467acd8e7ad08dfb3a076cf9849c67cc Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Mon, 9 Sep 2024 10:25:13 +0200 Subject: [PATCH 039/176] Add CLI flags to specify what hash is preferred --- cmd/csaf_downloader/config.go | 12 +++++++- cmd/csaf_downloader/downloader.go | 50 ++++++++++++++----------------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index dcfc090..71c5055 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -41,6 +41,13 @@ const ( validationUnsafe = validationMode("unsafe") ) +type hashAlgorithm string + +const ( + algSha256 = hashAlgorithm("SHA256") + algSha2512 = hashAlgorithm("SHA512") +) + type config struct { Directory string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"` Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"` @@ -79,6 +86,9 @@ type config struct { clientCerts []tls.Certificate ignorePattern filter.PatternMatcher + + //lint:ignore SA5008 We are using choice or than once: sha256, sha512 + PreferredHash hashAlgorithm `long:"preferred_hash" short:"h" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` } // configPaths are the potential file locations of the config file. @@ -220,7 +230,7 @@ func (cfg *config) prepareLogging() error { w = f } ho := slog.HandlerOptions{ - //AddSource: true, + // AddSource: true, Level: cfg.LogLevel.Level, ReplaceAttr: dropSubSeconds, } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3bf3647..3cb7332 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -53,7 +53,6 @@ type downloader struct { const failedValidationDir = "failed_validation" func newDownloader(cfg *config) (*downloader, error) { - var validator csaf.RemoteValidator if cfg.RemoteValidator != "" { @@ -103,7 +102,6 @@ func logRedirect(req *http.Request, via []*http.Request) error { } func (d *downloader) httpClient() util.Client { - hClient := http.Client{} if d.cfg.verbose() { @@ -253,7 +251,6 @@ func (d *downloader) downloadFiles( label csaf.TLPLabel, files []csaf.AdvisoryFile, ) error { - var ( advisoryCh = make(chan csaf.AdvisoryFile) errorCh = make(chan error) @@ -303,7 +300,6 @@ func (d *downloader) loadOpenPGPKeys( base *url.URL, expr *util.PathEval, ) error { - src, err := expr.Eval("$.public_openpgp_keys", doc) if err != nil { // no keys. @@ -357,7 +353,6 @@ func (d *downloader) loadOpenPGPKeys( defer res.Body.Close() return crypto.NewKeyFromArmoredReader(res.Body) }() - if err != nil { slog.Warn( "Reading public OpenPGP key failed", @@ -501,31 +496,35 @@ nextAdvisory: signData []byte ) - // Only hash when we have a remote counterpart we can compare it with. 
- if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) + if (d.cfg.PreferredHash != "sha512" || file.SHA512URL() == "") && file.SHA256URL() != "" { + // Only hash when we have a remote counterpart we can compare it with. + if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + if !file.IsDirectory() { + slog.Warn("Cannot fetch SHA256", + "url", file.SHA256URL(), + "error", err) + } else { + slog.Info("SHA256 not present", "file", file.URL()) + } } else { - slog.Info("SHA256 not present", "file", file.URL()) + s256 = sha256.New() + writers = append(writers, s256) } - } else { - s256 = sha256.New() - writers = append(writers, s256) } - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) + if (d.cfg.PreferredHash != "sha256" || file.SHA256URL() == "") && file.SHA512URL() != "" { + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if !file.IsDirectory() { + slog.Warn("Cannot fetch SHA512", + "url", file.SHA512URL(), + "error", err) + } else { + slog.Info("SHA512 not present", "file", file.URL()) + } } else { - slog.Info("SHA512 not present", "file", file.URL()) + s512 = sha512.New() + writers = append(writers, s512) } - } else { - s512 = sha512.New() - writers = append(writers, s512) } // Remember the data as we need to store it to file later. @@ -757,9 +756,6 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, } func loadHash(client util.Client, p string) ([]byte, []byte, error) { - if p == "" { - return nil, nil, fmt.Errorf("no hash path provided") - } resp, err := client.Get(p) if err != nil { return nil, nil, err From c148a18dba7684b17af5306569d2b4a737332e3b Mon Sep 17 00:00:00 2001 From: 4echow <33332102+4echow@users.noreply.github.com> Date: Thu, 12 Sep 2024 09:27:12 +0200 Subject: [PATCH 040/176] docs:: fix miner typo in csaf_downloader.md --- docs/csaf_downloader.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 003ae4a..07c6e63 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -158,7 +158,7 @@ key protection mechanism based on RFC 1423, see Thus it considered experimental and most likely to be removed in a future release. Please only use this option, if you fully understand the security implications! -Note that for fully automated processes, it usually not make sense +Note that for fully automated processes, it usually does not make sense to protect the client certificate's private key with a passphrase. Because the passphrase has to be accessible to the process anyway to run unattented. 
In this situation the processing environment should be secured From f36c96e79864e9aea64ce6b1017521b2492b6492 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Mon, 9 Sep 2024 16:04:47 +0200 Subject: [PATCH 041/176] Upgrade to go v1.22 Closes #570 --- .github/workflows/itest.yml | 2 +- README.md | 2 +- docs/Development.md | 2 +- go.mod | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index eff11c2..364c330 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -9,7 +9,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v3 with: - go-version: 1.21.0 + go-version: 1.22.0 - name: Set up Node.js uses: actions/setup-node@v3 diff --git a/README.md b/README.md index bc9ae2a..14ac64f 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.21+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` diff --git a/docs/Development.md b/docs/Development.md index e7ce388..5c4df22 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.22 and 1.21). +the latest version of Go (currently 1.22 and 1.23). ## Generated files diff --git a/go.mod b/go.mod index 5b33724..52f1f02 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/csaf-poc/csaf_distribution/v3 -go 1.21 +go 1.22 require ( github.com/BurntSushi/toml v1.3.2 From c0de0c2b6de4d0b739badfcbe6d259739af9cffa Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 27 Sep 2024 15:20:36 +0200 Subject: [PATCH 042/176] Check if hash present, before sending a request --- cmd/csaf_checker/processor.go | 44 +++++++++++------------------------ 1 file changed, 14 insertions(+), 30 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 38f3e34..ede8fd6 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -84,10 +84,8 @@ type reporter interface { report(*processor, *Domain) } -var ( - // errContinue indicates that the current check should continue. - errContinue = errors.New("continue") -) +// errContinue indicates that the current check should continue. +var errContinue = errors.New("continue") type whereType byte @@ -167,7 +165,6 @@ func (m *topicMessages) hasErrors() bool { // newProcessor returns an initialized processor. func newProcessor(cfg *config) (*processor, error) { - var validator csaf.RemoteValidator if cfg.RemoteValidator != "" { @@ -240,7 +237,6 @@ func (p *processor) reset() { // Then it calls the report method on each report from the given "reporters" parameter for each domain. // It returns a pointer to the report and nil, otherwise an error. func (p *processor) run(domains []string) (*Report, error) { - report := Report{ Date: ReportTime{Time: time.Now().UTC()}, Version: util.SemVersion, @@ -297,7 +293,6 @@ func (p *processor) run(domains []string) (*Report, error) { // fillMeta fills the report with extra informations from provider metadata. 
func (p *processor) fillMeta(domain *Domain) error { - if p.pmd == nil { return nil } @@ -323,7 +318,6 @@ func (p *processor) fillMeta(domain *Domain) error { // domainChecks compiles a list of checks which should be performed // for a given domain. func (p *processor) domainChecks(domain string) []func(*processor, string) error { - // If we have a direct domain url we dont need to // perform certain checks. direct := strings.HasPrefix(domain, "https://") @@ -393,7 +387,6 @@ func (p *processor) markChecked(s string, mask whereType) bool { } func (p *processor) checkRedirect(r *http.Request, via []*http.Request) error { - url := r.URL.String() p.checkTLS(url) if p.redirects == nil { @@ -495,7 +488,6 @@ func (p *processor) usedAuthorizedClient() bool { // rolieFeedEntries loads the references to the advisory files for a given feed. func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { - client := p.httpClient() res, err := client.Get(feed) p.badDirListings.use() @@ -546,7 +538,6 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var files []csaf.AdvisoryFile rfeed.Entries(func(entry *csaf.Entry) { - // Filter if we have date checking. if accept := p.cfg.Range; accept != nil { if t := time.Time(entry.Updated); !t.IsZero() && !accept.Contains(t) { @@ -759,14 +750,20 @@ func (p *processor) integrity( // Check hashes p.badIntegrities.use() - for _, x := range []struct { + type hash struct { ext string url func() string hash []byte - }{ - {"SHA256", f.SHA256URL, s256.Sum(nil)}, - {"SHA512", f.SHA512URL, s512.Sum(nil)}, - } { + } + hashes := []hash{} + if f.SHA256URL() != "" { + hashes = append(hashes, hash{"SHA256", f.SHA256URL, s256.Sum(nil)}) + } + if f.SHA512URL() != "" { + hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)}) + } + + for _, x := range hashes { hu, err := url.Parse(x.url()) if err != nil { lg(ErrorType, "Bad URL %s: %v", x.url(), err) @@ -918,7 +915,6 @@ func (p *processor) checkIndex(base string, mask whereType) error { // of the fields' values and if they are sorted properly. Then it passes the files to the // "integrity" functions. It returns error if some test fails, otherwise nil. func (p *processor) checkChanges(base string, mask whereType) error { - bu, err := url.Parse(base) if err != nil { return err @@ -978,8 +974,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = - append(times, t), + times, files = append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) } return times, files, nil @@ -1152,7 +1147,6 @@ func (p *processor) checkMissing(string) error { // checkInvalid goes over all found adivisories URLs and checks // if file name conforms to standard. func (p *processor) checkInvalid(string) error { - p.badDirListings.use() var invalids []string @@ -1174,7 +1168,6 @@ func (p *processor) checkInvalid(string) error { // checkListing goes over all found adivisories URLs and checks // if their parent directory is listable. func (p *processor) checkListing(string) error { - p.badDirListings.use() pgs := pages{} @@ -1209,7 +1202,6 @@ func (p *processor) checkListing(string) error { // checkWhitePermissions checks if the TLP:WHITE advisories are // available with unprotected access. 
func (p *processor) checkWhitePermissions(string) error { - var ids []string for id, open := range p.labelChecker.whiteAdvisories { if !open { @@ -1235,7 +1227,6 @@ func (p *processor) checkWhitePermissions(string) error { // According to the result, the respective error messages added to // badProviderMetadata. func (p *processor) checkProviderMetadata(domain string) bool { - p.badProviderMetadata.use() client := p.httpClient() @@ -1282,7 +1273,6 @@ func (p *processor) checkSecurity(domain string, legacy bool) (int, string) { // checkSecurityFolder checks the security.txt in a given folder. func (p *processor) checkSecurityFolder(folder string) string { - client := p.httpClient() path := folder + "security.txt" res, err := client.Get(path) @@ -1349,7 +1339,6 @@ func (p *processor) checkSecurityFolder(folder string) string { // and serves the "provider-metadata.json". // It returns an empty string if all checks are passed, otherwise the errormessage. func (p *processor) checkDNS(domain string) string { - client := p.httpClient() path := "https://csaf.data.security." + domain res, err := client.Get(path) @@ -1359,7 +1348,6 @@ func (p *processor) checkDNS(domain string) string { if res.StatusCode != http.StatusOK { return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", path, res.StatusCode, res.Status) - } hash := sha256.New() defer res.Body.Close() @@ -1378,7 +1366,6 @@ func (p *processor) checkDNS(domain string) string { // available under the /.well-known/csaf/ directory. Returns the errormessage if // an error was encountered, or an empty string otherwise func (p *processor) checkWellknown(domain string) string { - client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" @@ -1408,7 +1395,6 @@ func (p *processor) checkWellknown(domain string) string { // The function returns nil, unless errors outside the checks were found. // In that case, errors are returned. func (p *processor) checkWellknownSecurityDNS(domain string) error { - warningsW := p.checkWellknown(domain) // Security check for well known (default) and legacy location warningsS, sDMessage := p.checkSecurity(domain, false) @@ -1461,7 +1447,6 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { // As a result of these a respective error messages are passed to badPGP method // in case of errors. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { - p.badPGPs.use() src, err := p.expr.Eval("$.public_openpgp_keys", p.pmd) @@ -1520,7 +1505,6 @@ func (p *processor) checkPGPKeys(_ string) error { defer res.Body.Close() return crypto.NewKeyFromArmoredReader(res.Body) }() - if err != nil { p.badPGPs.error("Reading public OpenPGP key %s failed: %v", u, err) continue From f7dc3f5ec74ea8ccada62f64a15cd9d6f9fd8b72 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Sun, 29 Sep 2024 09:08:01 +0200 Subject: [PATCH 043/176] Use .test TLD for integration setup (#577) .local is reserved for local-area networks, and .localhost is reserved for loopback devices. Using .test allows easier usage for different test setups. * https://www.rfc-editor.org/rfc/rfc2606#section-2 defines the "test." top level domain and "localhost.". * https://www.rfc-editor.org/rfc/rfc6761.html#section-6.2 explains how different implementations can use "test.". 
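As a side note to the RFC references above, the set of names that are safe for such test setups is small and fixed, so a guard is easy to write. The helper below is only an illustration of that idea and not part of the repository: it accepts host names whose rightmost label is one of the top-level domains reserved by RFC 2606.

```go
package main

import (
	"fmt"
	"strings"
)

// reservedTestTLDs are the top-level domains RFC 2606 reserves for testing,
// documentation and local use.
var reservedTestTLDs = map[string]bool{
	"test":      true,
	"example":   true,
	"invalid":   true,
	"localhost": true,
}

// isTestOnlyHost reports whether a host name ends in one of the reserved
// TLDs and is therefore safe to use in an integration test setup.
func isTestOnlyHost(host string) bool {
	host = strings.TrimSuffix(strings.ToLower(host), ".")
	labels := strings.Split(host, ".")
	return reservedTestTLDs[labels[len(labels)-1]]
}

func main() {
	for _, h := range []string{"csaf.data.security.test", "localhost", "example.com"} {
		fmt.Printf("%-28s test-only: %v\n", h, isTestOnlyHost(h))
	}
}
```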
--- docs/development-ca.md | 2 +- docs/scripts/setupProviderForITest.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/development-ca.md b/docs/development-ca.md index 483732c..21f4ef4 100644 --- a/docs/development-ca.md +++ b/docs/development-ca.md @@ -55,7 +55,7 @@ signing_key encryption_key non_repudiation -dns_name = "*.local" +dns_name = "*.test" dns_name = "localhost" serial = 010 diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index 1a57f1e..f9d7d18 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -17,7 +17,7 @@ sudo chgrp -R www-data /var/www sudo chmod -R g+ws /var/www export NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -export DNS_NAME=csaf.data.security.localhost +export DNS_NAME=csaf.data.security.test sudo cp /usr/share/doc/fcgiwrap/examples/nginx.conf /etc/nginx/fcgiwrap.conf From 18e2e35e7cf0d92d463eaad736074c5c9d43165b Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 29 Oct 2024 09:49:27 +0100 Subject: [PATCH 044/176] Update README.md with link update alert --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 14ac64f..1953854 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,13 @@ Software-Engineering: 2024 Intevation GmbH --> + +> [!IMPORTANT] +> Adjust your links and go module paths if you still have `csaf-poc` in them, to avoid future breakage. +> (This repository was moved here on 2024-10-28. The old one is decrecated +> and redirection will be switched off in a few months.) + + # csaf_distribution Implements a [CSAF](https://csaf.io/) @@ -16,6 +23,7 @@ and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. + ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) is a tool for downloading advisories from a provider. From bdd8aa0a9415da3641cf2624ac0f57381e16b9b2 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 29 Oct 2024 09:50:26 +0100 Subject: [PATCH 045/176] Update README.md --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 1953854..e6ea77f 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,6 @@ and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. - ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) is a tool for downloading advisories from a provider. From bf057e2fa8f25e155bb616ebe98523c0f76e5148 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 10:51:38 +0100 Subject: [PATCH 046/176] Update repo move alert in README.md HTML links can be adjusted right now, go module paths will have to wait a bit. --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e6ea77f..53920d8 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,9 @@ > [!IMPORTANT] -> Adjust your links and go module paths if you still have `csaf-poc` in them, to avoid future breakage. +> To avoid future breakage, if you still use `csaf-poc`: +> 1. Adjust your HTML links. +> 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > (This repository was moved here on 2024-10-28. 
The old one is decrecated > and redirection will be switched off in a few months.) From 6ebe7f5f5d3845cc1c9fa26d209de12b4870150a Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 10:53:15 +0100 Subject: [PATCH 047/176] Update repo move alert in README.md use a better phrasing --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 53920d8..f28567e 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > (This repository was moved here on 2024-10-28. The old one is decrecated -> and redirection will be switched off in a few months.) +> and redirection will be switched off a few months later.) # csaf_distribution From 7aa95c03ca1f5a19914cce0158fb3212cab80d19 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 30 Oct 2024 11:03:18 +0100 Subject: [PATCH 048/176] fix: bring aggregator schema to errata01 (#583) --- csaf/schema/aggregator_json_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/schema/aggregator_json_schema.json b/csaf/schema/aggregator_json_schema.json index 7929f1f..cdad109 100644 --- a/csaf/schema/aggregator_json_schema.json +++ b/csaf/schema/aggregator_json_schema.json @@ -175,7 +175,7 @@ "type": "object", "required": [ "metadata", - "mirror", + "mirrors", "update_interval" ], "properties": { From 1aad5331d2d8d992467e8b5694c43f53dae2d22b Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 11:15:31 +0100 Subject: [PATCH 049/176] Update README.md reformat a bit --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index f28567e..8bdfd88 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ > To avoid future breakage, if you still use `csaf-poc`: > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. +> > (This repository was moved here on 2024-10-28. The old one is decrecated > and redirection will be switched off a few months later.) From 1c860a1ab21692f176ecc033fc484dcebc9f5728 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Wed, 30 Oct 2024 11:22:24 +0100 Subject: [PATCH 050/176] Update README.md: Fix: typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8bdfd88..568bf03 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > -> (This repository was moved here on 2024-10-28. The old one is decrecated +> (This repository was moved here on 2024-10-28. The old one is deprecated > and redirection will be switched off a few months later.) From ffadad38c6cc9aa9b29af2489ea4487d676e0f34 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 30 Oct 2024 15:53:22 +0100 Subject: [PATCH 051/176] improve test setupscript by adding missing zip Add zip as packages to be installed in preparation as the `make dist` target uses it. 
--- docs/scripts/prepareUbuntuInstanceForITests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scripts/prepareUbuntuInstanceForITests.sh b/docs/scripts/prepareUbuntuInstanceForITests.sh index ea88fc4..f124044 100755 --- a/docs/scripts/prepareUbuntuInstanceForITests.sh +++ b/docs/scripts/prepareUbuntuInstanceForITests.sh @@ -6,7 +6,7 @@ set -e # by installing the required packages. apt update -apt install -y make bash curl gnupg sed tar git nginx fcgiwrap gnutls-bin +apt install -y make bash curl gnupg sed tar git nginx fcgiwrap gnutls-bin zip # Install Go from binary distribution latest_go="$(curl https://go.dev/VERSION\?m=text| head -1).linux-amd64.tar.gz" From e8706e5eb99d40f464587c6d3aba2e2484a3fd6a Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 30 Oct 2024 14:46:50 +0100 Subject: [PATCH 052/176] feat: perform go path repo move * Change the go module path from github.com/csaf-poc/csaf_distribution to github.com/gocsaf/csaf. * Rename archive for release tarballs. * Adjust testing scripts and documentation. --- .github/workflows/itest.yml | 2 +- .github/workflows/release.yml | 4 ++-- Makefile | 6 +++--- README.md | 10 +++++----- cmd/csaf_aggregator/client.go | 2 +- cmd/csaf_aggregator/config.go | 12 ++++++------ cmd/csaf_aggregator/full.go | 4 ++-- cmd/csaf_aggregator/indices.go | 4 ++-- cmd/csaf_aggregator/interim.go | 4 ++-- cmd/csaf_aggregator/lazytransaction.go | 2 +- cmd/csaf_aggregator/lister.go | 4 ++-- cmd/csaf_aggregator/main.go | 2 +- cmd/csaf_aggregator/mirror.go | 4 ++-- cmd/csaf_aggregator/processor.go | 4 ++-- cmd/csaf_checker/config.go | 8 ++++---- cmd/csaf_checker/links.go | 2 +- cmd/csaf_checker/main.go | 2 +- cmd/csaf_checker/processor.go | 4 ++-- cmd/csaf_checker/report.go | 4 ++-- cmd/csaf_checker/reporters.go | 2 +- cmd/csaf_checker/roliecheck.go | 4 ++-- cmd/csaf_checker/rules.go | 2 +- cmd/csaf_downloader/config.go | 8 ++++---- cmd/csaf_downloader/downloader.go | 4 ++-- cmd/csaf_downloader/forwarder.go | 4 ++-- cmd/csaf_downloader/forwarder_test.go | 4 ++-- cmd/csaf_downloader/main.go | 2 +- cmd/csaf_provider/actions.go | 4 ++-- cmd/csaf_provider/config.go | 2 +- cmd/csaf_provider/create.go | 4 ++-- cmd/csaf_provider/files.go | 2 +- cmd/csaf_provider/indices.go | 2 +- cmd/csaf_provider/main.go | 2 +- cmd/csaf_provider/rolie.go | 4 ++-- cmd/csaf_provider/transaction.go | 4 ++-- cmd/csaf_uploader/config.go | 4 ++-- cmd/csaf_uploader/main.go | 2 +- cmd/csaf_uploader/processor.go | 6 +++--- cmd/csaf_validator/main.go | 4 ++-- csaf/advisories.go | 2 +- csaf/models.go | 2 +- csaf/providermetaloader.go | 2 +- csaf/rolie.go | 2 +- csaf/summary.go | 2 +- docs/csaf_checker.md | 2 +- docs/csaf_provider.md | 4 ++-- docs/provider-setup.md | 2 +- docs/scripts/Readme.md | 8 ++++---- docs/scripts/TLSClientConfigsForITest.sh | 2 +- docs/scripts/TLSConfigsForITest.sh | 2 +- docs/scripts/prepareUbuntuInstanceForITests.sh | 2 +- docs/scripts/testAggregator.sh | 2 +- docs/scripts/testChecker.sh | 2 +- docs/scripts/testDownloader.sh | 2 +- examples/README.md | 2 +- examples/purls_searcher/main.go | 6 +++--- go.mod | 2 +- internal/options/options.go | 2 +- 58 files changed, 102 insertions(+), 102 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 364c330..9cc4c6b 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -25,7 +25,7 @@ jobs: sudo apt install -y make nginx fcgiwrap gnutls-bin cp -r $GITHUB_WORKSPACE ~ cd ~ - cd csaf_distribution/docs/scripts/ + cd csaf/docs/scripts/ # keep in 
sync with docs/scripts/Readme.md export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" source ./TLSConfigsForITest.sh diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 739f45c..4bcd6ba 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -24,5 +24,5 @@ jobs: uses: softprops/action-gh-release@v1 with: files: | - dist/csaf_distribution-*.zip - dist/csaf_distribution-*.tar.gz + dist/csaf-*.zip + dist/csaf-*.tar.gz diff --git a/Makefile b/Makefile index b4b3964..083d3b6 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ # SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) # Software-Engineering: 2021 Intevation GmbH # -# Makefile to build csaf_distribution components +# Makefile to build csaf components SHELL = /bin/bash BUILD = go build @@ -59,7 +59,7 @@ testsemver: # Set -ldflags parameter to pass the semversion. -LDFLAGS = -ldflags "-X github.com/csaf-poc/csaf_distribution/v3/util.SemVersion=$(SEMVER)" +LDFLAGS = -ldflags "-X github.com/gocsaf/csaf/v3/util.SemVersion=$(SEMVER)" # Build binaries and place them under bin-$(GOOS)-$(GOARCH) # Using 'Target-specific Variable Values' to specify the build target system @@ -78,7 +78,7 @@ build_linux build_win build_mac_amd64 build_mac_arm64: env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/... -DISTDIR := csaf_distribution-$(SEMVER) +DISTDIR := csaf-$(SEMVER) dist: build_linux build_win build_mac_amd64 build_mac_arm64 mkdir -p dist mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 diff --git a/README.md b/README.md index 568bf03..cec9248 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ > and redirection will be switched off a few months later.) -# csaf_distribution +# csaf Implements a [CSAF](https://csaf.io/) ([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) @@ -52,10 +52,10 @@ is a CSAF Aggregator, to list or mirror providers. ## Other stuff ### [examples](./examples/README.md) -are small examples of how to use `github.com/csaf-poc/csaf_distribution` +are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress, as usage of this repository as a library to access is _not officially supported_, e.g. -see https://github.com/csaf-poc/csaf_distribution/issues/367 . +see https://github.com/gocsaf/csaf/issues/367 . ## Setup Binaries for the server side are only available and tested @@ -81,7 +81,7 @@ Download the binaries from the most recent release assets on Github. - A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) -- Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` +- Clone the repository `git clone https://github.com/gocsaf/csaf.git ` - Build Go components Makefile supplies the following targets: - Build for GNU/Linux system: `make build_linux` @@ -110,7 +110,7 @@ For further details of the development process consult our [development page](./ ## License -- `csaf_distribution` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). +- `csaf` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). - See the specific source files for details, the license itself can be found in the directory `LICENSES/`. 
diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index 8200d34..916baa5 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -13,7 +13,7 @@ import ( "io" "net/http" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) var errNotFound = errors.New("not found") diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index b73286c..81db0b7 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -20,12 +20,12 @@ import ( "time" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" "golang.org/x/time/rate" ) diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 9ec9812..e71d7b6 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -18,8 +18,8 @@ import ( "sync" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type fullJob struct { diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 272d25b..17c8d3a 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -19,8 +19,8 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) const ( diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index 023c9c4..94147bc 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -24,8 +24,8 @@ import ( "sync" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type interimJob struct { diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index 606d892..af36ee2 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -13,7 +13,7 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type lazyTransaction struct { diff --git a/cmd/csaf_aggregator/lister.go b/cmd/csaf_aggregator/lister.go index 4d758e4..7e1fb58 100644 --- a/cmd/csaf_aggregator/lister.go +++ b/cmd/csaf_aggregator/lister.go @@ -11,8 +11,8 @@ package main import ( "fmt" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mirrorAllowed checks if mirroring is allowed. 
diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 39c1051..2056e84 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -15,7 +15,7 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" "github.com/gofrs/flock" ) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 6bf72a3..c90ef68 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -30,8 +30,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mirrorAllowed checks if mirroring is allowed. diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index 5cb3628..b22e839 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -14,8 +14,8 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" "github.com/ProtonMail/gopenpgp/v2/crypto" ) diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index ac9ce62..3ea1840 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -13,10 +13,10 @@ import ( "fmt" "net/http" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" ) type outputFormat string diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index 0456ace..a323661 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -15,7 +15,7 @@ import ( "github.com/PuerkitoBio/goquery" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type ( diff --git a/cmd/csaf_checker/main.go b/cmd/csaf_checker/main.go index 752fdf8..4efb351 100644 --- a/cmd/csaf_checker/main.go +++ b/cmd/csaf_checker/main.go @@ -12,7 +12,7 @@ package main import ( "log" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" ) // run uses a processor to check all the given domains or direct urls diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index da4214b..5fd3fbd 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -32,8 +32,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // topicMessages stores the collected topicMessages for a specific topic. 
diff --git a/cmd/csaf_checker/report.go b/cmd/csaf_checker/report.go index 9b5251b..58ed25a 100644 --- a/cmd/csaf_checker/report.go +++ b/cmd/csaf_checker/report.go @@ -18,8 +18,8 @@ import ( "os" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/models" ) // MessageType is the kind of the message. diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 016d371..157eabe 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -13,7 +13,7 @@ import ( "sort" "strings" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type ( diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 53d1150..28bd437 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -15,8 +15,8 @@ import ( "sort" "strings" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // identifier consist of document/tracking/id and document/publisher/namespace, diff --git a/cmd/csaf_checker/rules.go b/cmd/csaf_checker/rules.go index eadbbb2..e04388d 100644 --- a/cmd/csaf_checker/rules.go +++ b/cmd/csaf_checker/rules.go @@ -12,7 +12,7 @@ import ( "fmt" "sort" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/gocsaf/csaf/v3/csaf" ) type ruleCondition int diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index dcfc090..33f8dc2 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -19,10 +19,10 @@ import ( "path/filepath" "time" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" ) const ( diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index fde4cd3..f21fcc0 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -33,8 +33,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type downloader struct { diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index c3681eb..12d9fe4 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -19,8 +19,8 @@ import ( "path/filepath" "strings" - "github.com/csaf-poc/csaf_distribution/v3/internal/misc" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/gocsaf/csaf/v3/util" ) // failedForwardDir is the name of the special sub folder diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index 907bbce..25f0f1f 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -23,8 +23,8 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" 
+ "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" ) func TestValidationStatusUpdate(t *testing.T) { diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index cc284bb..fe6efd1 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -15,7 +15,7 @@ import ( "os" "os/signal" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" ) func run(cfg *config, domains []string) error { diff --git a/cmd/csaf_provider/actions.go b/cmd/csaf_provider/actions.go index 8f385e6..1862983 100644 --- a/cmd/csaf_provider/actions.go +++ b/cmd/csaf_provider/actions.go @@ -26,8 +26,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) const dateFormat = time.RFC3339 diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index 49a7204..826b7bf 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -18,7 +18,7 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/crypto/bcrypt" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/gocsaf/csaf/v3/csaf" ) const ( diff --git a/cmd/csaf_provider/create.go b/cmd/csaf_provider/create.go index 56893c6..11e0b7c 100644 --- a/cmd/csaf_provider/create.go +++ b/cmd/csaf_provider/create.go @@ -22,8 +22,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // ensureFolders initializes the paths and call functions to create diff --git a/cmd/csaf_provider/files.go b/cmd/csaf_provider/files.go index 39a97e3..3b99ff5 100644 --- a/cmd/csaf_provider/files.go +++ b/cmd/csaf_provider/files.go @@ -13,7 +13,7 @@ import ( "crypto/sha512" "os" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func writeHashedFile(fname, name string, data []byte, armored string) error { diff --git a/cmd/csaf_provider/indices.go b/cmd/csaf_provider/indices.go index 805371b..a4eb97a 100644 --- a/cmd/csaf_provider/indices.go +++ b/cmd/csaf_provider/indices.go @@ -18,7 +18,7 @@ import ( "sort" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func updateIndex(dir, fname string) error { diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 8740e81..6c858c9 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -18,7 +18,7 @@ import ( "github.com/jessevdk/go-flags" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type options struct { diff --git a/cmd/csaf_provider/rolie.go b/cmd/csaf_provider/rolie.go index 98448bd..d9717b1 100644 --- a/cmd/csaf_provider/rolie.go +++ b/cmd/csaf_provider/rolie.go @@ -15,8 +15,8 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mergeCategories merges the given categories into the old ones. 
diff --git a/cmd/csaf_provider/transaction.go b/cmd/csaf_provider/transaction.go index 1b66ae0..c4c93a8 100644 --- a/cmd/csaf_provider/transaction.go +++ b/cmd/csaf_provider/transaction.go @@ -12,8 +12,8 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) func doTransaction( diff --git a/cmd/csaf_uploader/config.go b/cmd/csaf_uploader/config.go index a83361c..ceecff7 100644 --- a/cmd/csaf_uploader/config.go +++ b/cmd/csaf_uploader/config.go @@ -18,8 +18,8 @@ import ( "golang.org/x/crypto/bcrypt" "golang.org/x/term" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/options" ) const ( diff --git a/cmd/csaf_uploader/main.go b/cmd/csaf_uploader/main.go index 20f89fd..db1cef4 100644 --- a/cmd/csaf_uploader/main.go +++ b/cmd/csaf_uploader/main.go @@ -9,7 +9,7 @@ // Implements a command line tool that uploads csaf documents to csaf_provider. package main -import "github.com/csaf-poc/csaf_distribution/v3/internal/options" +import "github.com/gocsaf/csaf/v3/internal/options" func main() { args, cfg, err := parseArgsConfig() diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index 4598865..b57cafb 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -26,9 +26,9 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/misc" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/gocsaf/csaf/v3/util" ) type processor struct { diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index f6aecc4..b07c2f4 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -18,8 +18,8 @@ import ( "github.com/jessevdk/go-flags" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type options struct { diff --git a/csaf/advisories.go b/csaf/advisories.go index 6f07648..c51c84c 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -19,7 +19,7 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // AdvisoryFile constructs the urls of a remote file. diff --git a/csaf/models.go b/csaf/models.go index c7e507d..c4b132d 100644 --- a/csaf/models.go +++ b/csaf/models.go @@ -17,7 +17,7 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // TLPLabel is the traffic light policy of the CSAF. 
diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index b21ddc6..b28b606 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -18,7 +18,7 @@ import ( "net/http" "strings" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // ProviderMetadataLoader helps load provider-metadata.json from diff --git a/csaf/rolie.go b/csaf/rolie.go index c2b5b08..b94cfa3 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -14,7 +14,7 @@ import ( "sort" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // ROLIEServiceWorkspaceCollectionCategoriesCategory is a category in a ROLIE service collection. diff --git a/csaf/summary.go b/csaf/summary.go index 72d2faf..b10dd65 100644 --- a/csaf/summary.go +++ b/csaf/summary.go @@ -11,7 +11,7 @@ package csaf import ( "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) const ( diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 0b223b6..5152501 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -93,7 +93,7 @@ ignorepattern = [".*white.*", ".*red.*"] The `role` given in the `provider-metadata.json` is not yet considered to change the overall result, -see . +see . If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected. To check these advisories, authorization can be given via custom headers or certificates. diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index 81a45fa..b88924d 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -141,5 +141,5 @@ contact_details = "Example Company can be reached at contact_us@example.com, or There is an experimental upload interface which works with a web browser. It is disabled by default, as there are known issues, notably: - * https://github.com/csaf-poc/csaf_distribution/issues/43 - * https://github.com/csaf-poc/csaf_distribution/issues/256 + * https://github.com/gocsaf/csaf/issues/43 + * https://github.com/gocsaf/csaf/issues/256 diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 3f07fd0..48c29d0 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -115,7 +115,7 @@ sudo chmod g+r,o-rwx /etc/csaf/config.toml Here is a minimal example configuration, which you need to customize for a production setup, -see the [options of `csaf_provider`](https://github.com/csaf-poc/csaf_distribution/blob/main/docs/csaf_provider.md). +see the [options of `csaf_provider`](https://github.com/gocsaf/csaf/blob/main/docs/csaf_provider.md). diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index 95f39b2..77e8dae 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -1,7 +1,7 @@ Scripts for assisting the Integration tests. They were written on Ubuntu 20.04 TLS amd64 and also tested with 24.04 TLS. -- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf_distribution integration tests on a naked ubuntu LTS amd64. +- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf integration tests on a naked Ubuntu LTS amd64. - `TLSConfigsForITest.sh` generates a root CA and webserver cert by running `createRootCAForITest.sh` and `createWebserverCertForITest.sh` and configures nginx for serving TLS connections. 
@@ -14,11 +14,11 @@ As creating the folders needs to authenticate with the csaf_provider, the config Calling example (as user with sudo privileges): ``` bash - curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh + curl --fail -O https://raw.githubusercontent.com/gocsaf/csaf/main/docs/scripts/prepareUbuntuInstanceForITests.sh sudo bash prepareUbuntuInstanceForITests.sh - git clone https://github.com/csaf-poc/csaf_distribution.git # --branch - pushd csaf_distribution/docs/scripts/ + git clone https://github.com/gocsaf/csaf.git # --branch + pushd csaf/docs/scripts/ export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" source ./TLSConfigsForITest.sh diff --git a/docs/scripts/TLSClientConfigsForITest.sh b/docs/scripts/TLSClientConfigsForITest.sh index 1f94117..830666f 100755 --- a/docs/scripts/TLSClientConfigsForITest.sh +++ b/docs/scripts/TLSClientConfigsForITest.sh @@ -18,7 +18,7 @@ set -e NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -cd ~/csaf_distribution/docs/scripts/ +cd ~/csaf/docs/scripts/ source ./createCCForITest.sh echo ' diff --git a/docs/scripts/TLSConfigsForITest.sh b/docs/scripts/TLSConfigsForITest.sh index c1a5420..d7c06f9 100644 --- a/docs/scripts/TLSConfigsForITest.sh +++ b/docs/scripts/TLSConfigsForITest.sh @@ -17,7 +17,7 @@ set -e NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -cd ~/csaf_distribution/docs/scripts/ +cd ~/csaf/docs/scripts/ ## Create Root CA ./createRootCAForITest.sh diff --git a/docs/scripts/prepareUbuntuInstanceForITests.sh b/docs/scripts/prepareUbuntuInstanceForITests.sh index ea88fc4..75ce44b 100755 --- a/docs/scripts/prepareUbuntuInstanceForITests.sh +++ b/docs/scripts/prepareUbuntuInstanceForITests.sh @@ -2,7 +2,7 @@ set -e # This script prepares a naked Ubuntu LTS amd64 -# for the csaf_distribution integration tests +# for the csaf integration tests # by installing the required packages. apt update diff --git a/docs/scripts/testAggregator.sh b/docs/scripts/testAggregator.sh index 366ac07..f6322f6 100755 --- a/docs/scripts/testAggregator.sh +++ b/docs/scripts/testAggregator.sh @@ -29,6 +29,6 @@ popd echo echo '=== run aggregator' -cd ~/csaf_distribution/ +cd ~/csaf/ sudo cp docs/examples/aggregator.toml /etc/csaf sudo ./bin-linux-amd64/csaf_aggregator -c /etc/csaf/aggregator.toml diff --git a/docs/scripts/testChecker.sh b/docs/scripts/testChecker.sh index cb45aad..28474d0 100755 --- a/docs/scripts/testChecker.sh +++ b/docs/scripts/testChecker.sh @@ -11,7 +11,7 @@ set -e # to exit if a command in the script fails echo '==== run checker (twice)' -cd ~/csaf_distribution +cd ~/csaf ./bin-linux-amd64/csaf_checker -f html -o ../checker-results.html --insecure \ --client_cert ~/devca1/testclient1.crt \ diff --git a/docs/scripts/testDownloader.sh b/docs/scripts/testDownloader.sh index c4b9bce..6326536 100755 --- a/docs/scripts/testDownloader.sh +++ b/docs/scripts/testDownloader.sh @@ -10,7 +10,7 @@ set -e # to exit if a command in the script fails -cd ~/csaf_distribution +cd ~/csaf echo echo '==== run downloader (1)' diff --git a/examples/README.md b/examples/README.md index a70ea09..c525e96 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,6 +1,6 @@ # API examples -An experimental example of how to use `github.com/csaf-poc/csaf_distribution` +An experimental example of how to use `github.com/gocsaf/csaf` as a library. As usage of the repository as an API is currently a _work in progress_, these examples are likely to be changed. 
diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index c1ec3e1..72fb976 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -1,5 +1,5 @@ // Package main implements a simple demo program to -// work with the csaf_distribution library. +// work with the csaf library. package main import ( @@ -9,8 +9,8 @@ import ( "os" "strings" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) func main() { diff --git a/go.mod b/go.mod index 52f1f02..c8101f0 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/csaf-poc/csaf_distribution/v3 +module github.com/gocsaf/csaf/v3 go 1.22 diff --git a/internal/options/options.go b/internal/options/options.go index c0ad2bc..3a4867f 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -15,7 +15,7 @@ import ( "log/slog" "os" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" "github.com/BurntSushi/toml" "github.com/jessevdk/go-flags" From ace8aeaf985517cca2d3ba4b4a17db4e0f048021 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 15 Nov 2024 11:46:26 +0100 Subject: [PATCH 053/176] fix: build-in version for release tags * Change Makefile to remove the leading `v` from the git tag in the case of release tags. Previously this was only done for pre-release git tags. --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 083d3b6..04ec866 100644 --- a/Makefile +++ b/Makefile @@ -47,13 +47,13 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always) -GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') +GITDESC := $(shell git describe --tags --always | sed -E 's/^v//') +GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) # Hint: The regexp in the next line only matches if there is a hyphen (`-`) # followed by a number, by which we assume that git describe # has added a string after the tag -SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/v?([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From 1e3504c7539fd6dac3e7ffdb2c35cb1111153299 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 15 Nov 2024 12:12:24 +0100 Subject: [PATCH 054/176] improve Makefile improvement --- Makefile | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 04ec866..163ace5 100644 --- a/Makefile +++ b/Makefile @@ -41,19 +41,19 @@ tag_checked_out: # into a semver version. For this we increase the PATCH number, so that # any commit after a tag is considered newer than the semver from the tag # without an optional 'v' -# Note we need `--tags` because github release only creates lightweight tags +# Note we need `--tags` because github releases only create lightweight tags # (see feature request https://github.com/github/feedback/discussions/4924). # We use `--always` in case of being run as github action with shallow clone. 
# In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always | sed -E 's/^v//') -GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') +GITDESC := $(shell git describe --tags --always) +GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) -# Hint: The regexp in the next line only matches if there is a hyphen (`-`) -# followed by a number, by which we assume that git describe -# has added a string after the tag -SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +# Hint: The second regexp in the next line only matches +# if there is a hyphen (`-`) followed by a number, +# by which we assume that git describe has added a string after the tag +SEMVER := $(shell echo '$(GITDESC)' | sed -E -e 's/^v//' -e 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From c00dc36547e433f52d6dbcbf5345d6cc534c2d8a Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:31:58 +0100 Subject: [PATCH 055/176] Remove `-h` for preferred hash configuration This option was in conflict with the help display. --- cmd/csaf_downloader/config.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 71c5055..619cce1 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -88,7 +88,7 @@ type config struct { ignorePattern filter.PatternMatcher //lint:ignore SA5008 We are using choice or than once: sha256, sha512 - PreferredHash hashAlgorithm `long:"preferred_hash" short:"h" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` + PreferredHash hashAlgorithm `long:"preferred_hash" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` } // configPaths are the potential file locations of the config file. 
From de047b76829f898ba9e22be99ca384dc0ddc7563 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 21 Nov 2024 12:53:07 +0100 Subject: [PATCH 056/176] Feat: Add prefered hash to downloader docs --- docs/csaf_downloader.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index fcf6634..6335366 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -32,6 +32,7 @@ Application Options: --logfile=FILE FILE to log downloading to (default: downloader.log) --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file + --preferred_hash=HASH[sha256|sha512] HASH to prefer Help Options: -h, --help Show this help message From 01645f55598e01e891c1a146eda6b9817b2e9c9c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 21 Nov 2024 12:49:02 +0100 Subject: [PATCH 057/176] Fix: Update downloader docs --- docs/csaf_downloader.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 07c6e63..04f93b2 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -22,6 +22,7 @@ Application Options: -f, --folder=FOLDER Download into a given subFOLDER -i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs -H, --header= One or more extra HTTP header fields + --enumerate_pmd_only If this flag is set to true, the downloader will only enumerate valid provider metadata files, but not download documents --validator=URL URL to validate documents remotely --validator_cache=FILE FILE to cache remote validations --validator_preset=PRESETS One or more PRESETS to validate remotely (default: [mandatory]) @@ -30,8 +31,8 @@ Application Options: --forward_header= One or more extra HTTP header fields used by forwarding --forward_queue=LENGTH Maximal queue LENGTH before forwarder (default: 5) --forward_insecure Do not check TLS certificates from forward endpoint - --logfile=FILE FILE to log downloading to (default: downloader.log) - --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) + --log_file=FILE FILE to log downloading to (default: downloader.log) + --log_level=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file Help Options: From fe4f01d06255e67db2c5ee3f6f3e9a1453b2dea0 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:52:56 +0100 Subject: [PATCH 058/176] fix: Link to file was not working (#592) --- docs/csaf_provider.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index b88924d..cb27f9f 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -4,7 +4,7 @@ The [setup docs](../README.md#setup-trusted-provider) explain how to wire this up with nginx and where the config file lives. 
When installed, two endpoints are offered, -and you should use the [csaf_uploader](../docs/csaf_uploader) +and you should use the [csaf_uploader](../docs/csaf_uploader.md) to access them: ### /api/create From f6d7589fde4b7208572d6a0781dd0624ecbbe582 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Fri, 22 Nov 2024 15:58:41 +0100 Subject: [PATCH 059/176] Add required upload permissions --- .github/workflows/release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4bcd6ba..d1e370f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,6 +8,8 @@ jobs: releases-matrix: name: Release Go binaries runs-on: ubuntu-20.04 + permissions: + contents: write steps: - name: Checkout uses: actions/checkout@v3 From 9495d8b1c38ac814f10fd29762e509ed849203db Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 22 Nov 2024 16:10:54 +0100 Subject: [PATCH 060/176] Update Go 3rd party libs --- go.mod | 30 +++++++++++----------- go.sum | 80 +++++++++++++++++++++++----------------------------------- 2 files changed, 47 insertions(+), 63 deletions(-) diff --git a/go.mod b/go.mod index c8101f0..1ef2216 100644 --- a/go.mod +++ b/go.mod @@ -1,31 +1,31 @@ module github.com/gocsaf/csaf/v3 -go 1.22 +go 1.22.9 require ( - github.com/BurntSushi/toml v1.3.2 + github.com/BurntSushi/toml v1.4.0 github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 - github.com/ProtonMail/gopenpgp/v2 v2.7.4 + github.com/ProtonMail/gopenpgp/v2 v2.8.0 github.com/PuerkitoBio/goquery v1.8.1 - github.com/gofrs/flock v0.8.1 - github.com/jessevdk/go-flags v1.5.0 + github.com/gofrs/flock v0.12.1 + github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 - go.etcd.io/bbolt v1.3.8 - golang.org/x/crypto v0.14.0 - golang.org/x/term v0.13.0 - golang.org/x/time v0.3.0 + go.etcd.io/bbolt v1.3.11 + golang.org/x/crypto v0.29.0 + golang.org/x/term v0.26.0 + golang.org/x/time v0.8.0 ) require ( - github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect + github.com/ProtonMail/go-crypto v1.1.2 // indirect github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect github.com/andybalholm/cascadia v1.3.2 // indirect - github.com/cloudflare/circl v1.3.6 // indirect + github.com/cloudflare/circl v1.5.0 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/shopspring/decimal v1.3.1 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.14.0 // indirect - golang.org/x/text v0.13.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect ) diff --git a/go.sum b/go.sum index f81653d..47637e9 100644 --- a/go.sum +++ b/go.sum @@ -1,34 +1,30 @@ -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= +github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/Intevation/gval v1.3.0 h1:+Ze5sft5MmGbZrHj06NVUbcxCb67l9RaPTLMNr37mjw= github.com/Intevation/gval v1.3.0/go.mod h1:xmGyGpP5be12EL0P12h+dqiYG8qn2j3PJxIgkoOHO5o= github.com/Intevation/jsonpath v0.2.1 h1:rINNQJ0Pts5XTFEG+zamtdL7l9uuE1z0FBA+r55Sw+A= 
github.com/Intevation/jsonpath v0.2.1/go.mod h1:WnZ8weMmwAx/fAO3SutjYFU+v7DFreNYnibV7CiaYIw= -github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= -github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= -github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-crypto v1.1.2 h1:A7JbD57ThNqh7XjmHE+PXpQ3Dqt3BrSAC0AL0Go3KS0= +github.com/ProtonMail/go-crypto v1.1.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= -github.com/ProtonMail/gopenpgp/v2 v2.7.4 h1:Vz/8+HViFFnf2A6XX8JOvZMrA6F5puwNvvF21O1mRlo= -github.com/ProtonMail/gopenpgp/v2 v2.7.4/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= +github.com/ProtonMail/gopenpgp/v2 v2.8.0 h1:WvMv3CMcFsqKSM4/Qf8sf3tgyQkzDqQmoSE49bnBuP4= +github.com/ProtonMail/gopenpgp/v2 v2.8.0/go.mod h1:qb2GUSnmA9ipBW5GVtCtEhkummSlqs2A8Ar3S0HBgSY= github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= -github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= -github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= -github.com/cloudflare/circl v1.3.6 h1:/xbKIqSHbZXHwkhbrhrt2YOHIwYJlXH94E3tI/gDlUg= -github.com/cloudflare/circl v1.3.6/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= +github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= +github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= +github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/pkg/errors 
v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -37,78 +33,66 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= +go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.17.0 
h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text 
v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= +golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From 7824f3b48da9c868940936b3839483d15feaf8f3 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 22 Nov 2024 16:31:56 +0100 Subject: [PATCH 061/176] Improve hash fetching and logging --- cmd/csaf_downloader/config.go | 4 +- cmd/csaf_downloader/downloader.go | 108 +++++++++++++++++++++++------- 2 files changed, 84 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 619cce1..a262ef7 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -44,8 +44,8 @@ const ( type hashAlgorithm string const ( - algSha256 = hashAlgorithm("SHA256") - algSha2512 = hashAlgorithm("SHA512") + algSha256 = hashAlgorithm("SHA256") + algSha512 = hashAlgorithm("SHA512") ) type config struct { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3cb7332..18fc1e8 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -25,6 +25,7 @@ import ( "os" "path" "path/filepath" + "slices" "strconv" "strings" "sync" @@ -37,6 +38,13 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/util" ) +type hashFetchInfo struct { + url string + preferred bool + warn bool + hashType hashAlgorithm +} + type downloader struct { cfg *config keys *crypto.KeyRing @@ -496,35 +504,39 @@ nextAdvisory: signData []byte ) - if (d.cfg.PreferredHash != "sha512" || file.SHA512URL() == "") && file.SHA256URL() != "" { - // Only hash when we have a remote counterpart we can compare it with. 
- if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) - } else { - slog.Info("SHA256 not present", "file", file.URL()) - } - } else { - s256 = sha256.New() - writers = append(writers, s256) + hashToFetch := []hashFetchInfo{} + if file.SHA512URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA512URL(), + warn: true, + hashType: algSha512, + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512))}) + } else { + slog.Info("SHA512 not present") + } + if file.SHA256URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA256URL(), + warn: true, + hashType: algSha256, + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256))}) + } else { + slog.Info("SHA256 not present") + } + if file.IsDirectory() { + for i := range hashToFetch { + hashToFetch[i].warn = false } } - if (d.cfg.PreferredHash != "sha256" || file.SHA256URL() == "") && file.SHA512URL() != "" { - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) - } else { - slog.Info("SHA512 not present", "file", file.URL()) - } - } else { - s512 = sha512.New() - writers = append(writers, s512) - } + remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch) + if remoteSHA512 != nil { + s512 = sha512.New() + writers = append(writers, s512) + } + if remoteSHA256 != nil { + s256 = sha256.New() + writers = append(writers, s256) } // Remember the data as we need to store it to file later. @@ -755,6 +767,50 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, return sign, data, nil } +func loadHashes(client util.Client, hashes []hashFetchInfo) ([]byte, []byte, []byte, []byte) { + var remoteSha256, remoteSha512, sha256Data, sha512Data []byte + + // Load preferred hashes first + slices.SortStableFunc(hashes, func(a, b hashFetchInfo) int { + if a.preferred == b.preferred { + return 0 + } + if a.preferred && !b.preferred { + return -1 + } + return 1 + }) + for _, h := range hashes { + if remote, data, err := loadHash(client, h.url); err != nil { + if h.warn { + slog.Warn("Cannot fetch hash", + "hash", h.hashType, + "url", h.url, + "error", err) + } else { + slog.Info("Hash not present", "hash", h.hashType, "file", h.url) + } + } else { + switch h.hashType { + case algSha512: + { + remoteSha512 = remote + sha512Data = data + } + case algSha256: + { + remoteSha256 = remote + sha256Data = data + } + } + if h.preferred { + break + } + } + } + return remoteSha256, sha256Data, remoteSha512, sha512Data +} + func loadHash(client util.Client, p string) ([]byte, []byte, error) { resp, err := client.Get(p) if err != nil { From b2180849e99f2b1df9dbc97a6b2d3c6d93fcc679 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Mon, 25 Nov 2024 09:38:13 +0100 Subject: [PATCH 062/176] Update README.md that go paths can be adjusted --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cec9248..463b1d9 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ > [!IMPORTANT] > To avoid future breakage, if you still use `csaf-poc`: > 1. Adjust your HTML links. -> 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. -> +> 2. 
Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). +> > (This repository was moved here on 2024-10-28. The old one is deprecated > and redirection will be switched off a few months later.) From a167bf65ad14acb142dba288529ee760799f338d Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 25 Nov 2024 14:27:56 +0100 Subject: [PATCH 063/176] Add Apache 2.0 license to root folder This allows other programs like google/licensecheck to correctly detect the license. This is required to display the documentation in `pkg.go.dev`. --- LICENSE-Apache-2.0.txt | 73 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 LICENSE-Apache-2.0.txt diff --git a/LICENSE-Apache-2.0.txt b/LICENSE-Apache-2.0.txt new file mode 100644 index 0000000..137069b --- /dev/null +++ b/LICENSE-Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. 
(Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. From ffb4eff933fef6c222dd131e90675152589c8003 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 27 Nov 2024 12:15:21 +0100 Subject: [PATCH 064/176] Merge unittest into sha-handling commit 990c74a1a64cf8688a7fd14ebb524ce96a320eef Merge: 86d7ce1 7824f3b Author: koplas Date: Fri Nov 22 16:58:46 2024 +0100 Merge branch 'sha-handling' into unittest commit 86d7ce13dcf1ff2250f27b5e9b811da38937fff5 Merge: a6807d2 79b8900 Author: koplas Date: Fri Nov 22 16:54:45 2024 +0100 Merge branch 'sha-handling' into unittest commit 79b89009dd7f5dd748ccedc0ea87ea26e75b65d2 Author: koplas Date: Fri Nov 22 16:31:56 2024 +0100 Improve hash fetching and logging commit a6807d24d604cafa4e5d30d6ba9c948490d9f883 Merge: ddb5518 d18d2c3 Author: koplas Date: Fri Nov 22 16:51:55 2024 +0100 Merge branch 'sha-handling' into unittest commit d18d2c3bf17950dad276457136c2262988cca129 Author: koplas Date: Fri Nov 22 16:31:56 2024 +0100 Improve hash fetching and logging commit ddb5518c6d57adce14fb5f7665d219778e642c53 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Tue Sep 17 10:45:25 2024 +0200 Extend SHA marking tests commit 13c94f4fa06c0ba3ca52f76b93934f6855d80e81 Author: koplas Date: Mon Sep 16 20:46:31 2024 +0200 Use temp directory for downloads commit 1819b4896babaf9bd1136e5846e07224fb201b18 Author: koplas Date: Mon Sep 16 20:37:55 2024 +0200 Fix rolie feed commit 989e3667bad4c10cb1a779d3a7efd526929dc002 Author: koplas Date: Mon Sep 16 20:23:22 2024 +0200 Fix provider-metadata.json commit 714735d74a159e1fd8f7e756673742708dc758d4 Author: koplas Date: Mon Sep 16 20:08:21 2024 +0200 Implement provider handler commit d488e3994749c3e7daf2c00f2a7952974a8dce49 Author: koplas Date: Mon Sep 16 16:26:37 2024 +0200 Add info about gpg key commit a9bf9da130a04fffbf00481930575d1b292d138f Author: koplas Date: Mon Sep 16 16:12:49 2024 +0200 Rename directory testdata commit 6ca6dfee25c947758fac0abfb28e10049809d3ec Author: koplas Date: Mon Sep 16 16:01:41 2024 +0200 Add initial downloader tests commit 20bee797c61a457c58b37c208f0540a5ed7d7468 Author: koplas Date: Mon Sep 16 15:58:31 2024 +0200 Fix: Remove unecessary error print commit 8e4e508073e6a8d34922295de35da42b4ea8a93a Author: koplas Date: Mon Sep 16 14:50:48 2024 +0200 Extend links test commit 3ba29f94de3eebc379adc021f40fd5cd0587b57d Author: koplas Date: Mon Sep 16 14:11:14 2024 +0200 Add initial directory feed testdata commit dee55aafd9052adcda28a231b04271d866d06dd7 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Mon Sep 16 10:47:32 2024 +0200 Add initial testdata commit cd9338ae7279791db62e28e8f4b5cfe9cf370881 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Thu Sep 12 
15:54:42 2024 +0200 Add initial download unittests --- cmd/csaf_aggregator/client_test.go | 67 ++++++ cmd/csaf_checker/links_test.go | 80 ++++++- cmd/csaf_downloader/config.go | 4 +- cmd/csaf_downloader/downloader.go | 6 + cmd/csaf_downloader/downloader_test.go | 218 ++++++++++++++++++ csaf/providermetaloader.go | 2 +- .../openpgp/info.txt | 2 + .../openpgp/privkey.asc | 15 ++ .../openpgp/pubkey.asc | 13 ++ .../provider-metadata.json | 25 ++ .../simple-directory-provider/security.txt | 2 + .../avendor-advisory-0004-not-listed.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json.asc | 7 + .../white/avendor-advisory-0004.json.sha256 | 1 + .../white/avendor-advisory-0004.json.sha512 | 1 + .../white/changes.csv | 1 + .../white/index.html | 6 + .../simple-directory-provider/white/index.txt | 1 + .../simple-rolie-provider/openpgp/info.txt | 2 + .../simple-rolie-provider/openpgp/privkey.asc | 15 ++ .../simple-rolie-provider/openpgp/pubkey.asc | 13 ++ .../provider-metadata.json | 33 +++ testdata/simple-rolie-provider/security.txt | 2 + testdata/simple-rolie-provider/service.json | 23 ++ .../white/avendor-advisory-0004.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json.asc | 7 + .../white/avendor-advisory-0004.json.sha256 | 1 + .../white/avendor-advisory-0004.json.sha512 | 1 + .../white/white-feed.json | 61 +++++ 30 files changed, 1115 insertions(+), 4 deletions(-) create mode 100644 cmd/csaf_aggregator/client_test.go create mode 100644 cmd/csaf_downloader/downloader_test.go create mode 100644 testdata/simple-directory-provider/openpgp/info.txt create mode 100644 testdata/simple-directory-provider/openpgp/privkey.asc create mode 100644 testdata/simple-directory-provider/openpgp/pubkey.asc create mode 100644 testdata/simple-directory-provider/provider-metadata.json create mode 100644 testdata/simple-directory-provider/security.txt create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 create mode 100644 testdata/simple-directory-provider/white/changes.csv create mode 100644 testdata/simple-directory-provider/white/index.html create mode 100644 testdata/simple-directory-provider/white/index.txt create mode 100644 testdata/simple-rolie-provider/openpgp/info.txt create mode 100644 testdata/simple-rolie-provider/openpgp/privkey.asc create mode 100644 testdata/simple-rolie-provider/openpgp/pubkey.asc create mode 100644 testdata/simple-rolie-provider/provider-metadata.json create mode 100644 testdata/simple-rolie-provider/security.txt create mode 100644 testdata/simple-rolie-provider/service.json create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 create mode 100644 testdata/simple-rolie-provider/white/white-feed.json diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go new file mode 100644 index 
0000000..c08b29a --- /dev/null +++ b/cmd/csaf_aggregator/client_test.go @@ -0,0 +1,67 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package main + +import ( + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/csaf-poc/csaf_distribution/v3/util" +) + +func Test_downloadJSON(t *testing.T) { + tests := []struct { + name string + statusCode int + contentType string + wantErr error + }{ + { + name: "status ok, application/json", + statusCode: http.StatusOK, + contentType: "application/json", + wantErr: nil, + }, + { + name: "status found, application/json", + statusCode: http.StatusFound, + contentType: "application/json", + wantErr: errNotFound, + }, + { + name: "status ok, application/xml", + statusCode: http.StatusOK, + contentType: "application/xml", + wantErr: errNotFound, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + found := func(r io.Reader) error { + return nil + } + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Add("Content-Type", test.contentType) + w.WriteHeader(test.statusCode) + })) + defer server.Close() + hClient := http.Client{} + client := util.Client(&hClient) + if gotErr := downloadJSON(client, server.URL, found); gotErr != test.wantErr { + t.Errorf("downloadJSON: Expected %q but got %q.", test.wantErr, gotErr) + } + }) + } +} diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index 8abf4e6..aa04222 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -10,8 +10,12 @@ package main import ( "fmt" + "net/http" + "net/http/httptest" "strings" "testing" + + "github.com/csaf-poc/csaf_distribution/v3/util" ) const page0 = ` @@ -31,7 +35,6 @@ const page0 = ` ` func TestLinksOnPage(t *testing.T) { - var links []string err := linksOnPage( @@ -58,3 +61,78 @@ func TestLinksOnPage(t *testing.T) { } } } + +func Test_listed(t *testing.T) { + tests := []struct { + name string + badDirs util.Set[string] + path string + want bool + }{ + { + name: "listed path", + badDirs: util.Set[string]{}, + path: "/white/avendor-advisory-0004.json", + want: true, + }, + { + name: "badDirs contains path", + badDirs: util.Set[string]{"/white/": {}}, + path: "/white/avendor-advisory-0004.json", + want: false, + }, + { + name: "not found", + badDirs: util.Set[string]{}, + path: "/not-found/resource.json", + want: false, + }, + { + name: "badDirs does not contain path", + badDirs: util.Set[string]{"/bad-dir/": {}}, + path: "/white/avendor-advisory-0004.json", + want: true, + }, + { + name: "unlisted path", + badDirs: util.Set[string]{}, + path: "/white/avendor-advisory-0004-not-listed.json", + want: false, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + fs := http.FileServer(http.Dir("../../testdata/simple-directory-provider")) + server := httptest.NewTLSServer(fs) + defer server.Close() + + serverURL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + pgs := pages{} + cfg := config{RemoteValidator: "", RemoteValidatorCache: ""} + p, err := newProcessor(&cfg) + if err != nil { + t.Error(err) + } 
+	p.client = client
+
+	badDirs := util.Set[string]{}
+	for dir := range test.badDirs {
+		badDirs.Add(serverURL + dir)
+	}
+
+	got, _ := pgs.listed(serverURL+test.path, p, badDirs)
+	if got != test.want {
+		t.Errorf("%q: Expected %t but got %t.", test.name, test.want, got)
+	}
+	})
+	}
+}
diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index a262ef7..a44fa81 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go
@@ -44,8 +44,8 @@ const (
 type hashAlgorithm string
 const (
-	algSha256 = hashAlgorithm("SHA256")
-	algSha512 = hashAlgorithm("SHA512")
+	algSha256 = hashAlgorithm("sha256")
+	algSha512 = hashAlgorithm("sha512")
 )
 type config struct {
diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 18fc1e8..ca5cccc 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go
@@ -47,6 +47,7 @@ type hashFetchInfo struct {
 type downloader struct {
 	cfg *config
+	client *util.Client // Used for testing
 	keys *crypto.KeyRing
 	validator csaf.RemoteValidator
 	forwarder *forwarder
@@ -131,6 +132,11 @@ func (d *downloader) httpClient() util.Client {
 	client := util.Client(&hClient)
+
+	// Overwrite for testing purposes
+	if d.client != nil {
+		client = *d.client
+	}
+
 	// Add extra headers.
 	if len(d.cfg.ExtraHeader) > 0 {
 		client = &util.HeaderClient{
diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go new file mode 100644 index 0000000..cf02035 --- /dev/null +++ b/cmd/csaf_downloader/downloader_test.go
@@ -0,0 +1,218 @@
+// This file is Free Software under the Apache-2.0 License
+// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI)
+// Software-Engineering: 2023 Intevation GmbH
+
+package main
+
+import (
+	"context"
+	"errors"
+	"html/template"
+	"log/slog"
+	"net/http"
+	"net/http/httptest"
+	"os"
+	"strings"
+	"testing"
+
+	"github.com/csaf-poc/csaf_distribution/v3/internal/options"
+	"github.com/csaf-poc/csaf_distribution/v3/util"
+)
+
+type ProviderParams struct {
+	URL string
+	EnableSha256 bool
+	EnableSha512 bool
+}
+
+func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		path := "../../testdata/"
+		if directoryProvider {
+			path += "simple-directory-provider"
+		} else {
+			path += "simple-rolie-provider"
+		}
+
+		path += r.URL.Path
+
+		if strings.HasSuffix(r.URL.Path, "/") {
+			path += "index.html"
+		}
+
+		content, err := os.ReadFile(path)
+		if err != nil {
+			w.WriteHeader(http.StatusNotFound)
+			return
+		}
+		switch {
+		case strings.HasSuffix(path, ".html"):
+			w.Header().Add("Content-Type", "text/html")
+		case strings.HasSuffix(path, ".json"):
+			w.Header().Add("Content-Type", "application/json")
+		case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256:
+			w.WriteHeader(http.StatusNotFound)
+			return
+		case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512:
+			w.WriteHeader(http.StatusNotFound)
+			return
+		default:
+			w.Header().Add("Content-Type", "text/plain")
+		}
+
+		tmplt, err := template.New("base").Parse(string(content))
+		if err != nil {
+			w.WriteHeader(http.StatusInternalServerError)
+			return
+		}
+		err = tmplt.Execute(w, params)
+		if err != nil {
+			w.WriteHeader(http.StatusInternalServerError)
+			return
+		}
+	})
+}
+
+func checkIfFileExists(path string, t *testing.T) bool {
+	if _, err := os.Stat(path); err == nil {
+		return true
+	} else if errors.Is(err, os.ErrNotExist) {
+		return false
+	} else {
+		t.Fatalf("Failed to check if file exists: %v", err)
+		return false
+	}
+}
+
+func TestShaMarking(t *testing.T) {
+	tests := []struct {
+		name string
+		directoryProvider bool
+		wantSha256 bool
+		wantSha512 bool
+		enableSha256 bool
+		enableSha512 bool
+		preferredHash hashAlgorithm
+	}{
+		{
+			name: "want sha256 and sha512",
+			directoryProvider: false,
+			wantSha256: true,
+			wantSha512: true,
+			enableSha256: true,
+			enableSha512: true,
+		},
+		{
+			name: "only want sha256",
+			directoryProvider: false,
+			wantSha256: true,
+			wantSha512: false,
+			enableSha256: true,
+			enableSha512: true,
+			preferredHash: algSha256,
+		},
+		{
+			name: "only want sha512",
+			directoryProvider: false,
+			wantSha256: false,
+			wantSha512: true,
+			enableSha256: true,
+			enableSha512: true,
+			preferredHash: algSha512,
+		},
+		{
+			name: "only want sha512",
+			directoryProvider: false,
+			wantSha256: false,
+			wantSha512: true,
+			enableSha256: true,
+			enableSha512: true,
+			preferredHash: algSha512,
+		},
+
+		{
+			name: "only deliver sha256",
+			directoryProvider: false,
+			wantSha256: true,
+			wantSha512: false,
+			enableSha256: true,
+			enableSha512: false,
+			preferredHash: algSha512,
+		},
+		{
+			name: "only want sha256, directory provider",
+			directoryProvider: true,
+			wantSha256: true,
+			wantSha512: false,
+			enableSha256: true,
+			enableSha512: true,
+			preferredHash: algSha256,
+		},
+		{
+			name: "only want sha512, directory provider",
+			directoryProvider: true,
+			wantSha256: false,
+			wantSha512: true,
+			enableSha256: true,
+			enableSha512: true,
+			preferredHash: algSha512,
+		},
+	}
+
+	t.Parallel()
+	for _, testToRun := range tests {
+		test := testToRun
+		t.Run(test.name, func(tt *testing.T) {
+			tt.Parallel()
+			serverURL := ""
+			params := ProviderParams{
+				URL: "",
+				EnableSha256: test.enableSha256,
+				EnableSha512: test.enableSha512,
+			}
+			server := httptest.NewTLSServer(ProviderHandler(&params, test.directoryProvider))
+			defer server.Close()
+
+			serverURL = server.URL
+			params.URL = server.URL
+
+			hClient := server.Client()
+			client := util.Client(hClient)
+
+			tempDir := t.TempDir()
+			cfg := config{LogLevel: &options.LogLevel{Level: slog.LevelDebug}, Directory: tempDir, PreferredHash: test.preferredHash}
+			err := cfg.prepare()
+			if err != nil {
+				t.Fatalf("SHA marking config failed: %v", err)
+			}
+			d, err := newDownloader(&cfg)
+			if err != nil {
+				t.Fatalf("could not init downloader: %v", err)
+			}
+			d.client = &client
+
+			ctx := context.Background()
+			err = d.run(ctx, []string{serverURL + "/provider-metadata.json"})
+			if err != nil {
+				t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err)
+			}
+			d.close()
+
+			// Check for downloaded hashes
+			sha256Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha256", t)
+			sha512Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha512", t)
+
+			if sha256Exists != test.wantSha256 {
+				t.Errorf("%v: expected sha256 hash present to be %v, got: %v", test.name, test.wantSha256, sha256Exists)
+			}
+
+			if sha512Exists != test.wantSha512 {
+				t.Errorf("%v: expected sha512 hash present to be %v, got: %v", test.name, test.wantSha512, sha512Exists)
+			}
+		})
+	}
+}
diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index b21ddc6..aa3c38a 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -352,7 +352,7 @@ func (pmdl 
*ProviderMetadataLoader) loadFromURL(path string) *LoadedProviderMeta case len(errors) > 0: result.Messages = []ProviderMetadataLoadMessage{{ Type: SchemaValidationFailed, - Message: fmt.Sprintf("%s: Validating against JSON schema failed: %v", path, err), + Message: fmt.Sprintf("%s: Validating against JSON schema failed", path), }} for _, msg := range errors { result.Messages.Add( diff --git a/testdata/simple-directory-provider/openpgp/info.txt b/testdata/simple-directory-provider/openpgp/info.txt new file mode 100644 index 0000000..3a159f6 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/info.txt @@ -0,0 +1,2 @@ +The GPG key was generated with no passphrase and this command: +`gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --quick-generate-key security@example.com"` diff --git a/testdata/simple-directory-provider/openpgp/privkey.asc b/testdata/simple-directory-provider/openpgp/privkey.asc new file mode 100644 index 0000000..816f309 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/privkey.asc @@ -0,0 +1,15 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- + +lFgEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr4AAQD5G5xy/yTN5b+lvV5Ahrbz1qOZ/wmKTieGOH9GZb6JwhHwtBRzZWN1 +cml0eUBleGFtcGxlLmNvbYiZBBMWCgBBFiEEqJFMovEROcammgAY+zzZsV3mFZYF +AmbbEDcCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ+zzZ +sV3mFZZskQEAg5Dttqm6TA7MtLxz7VSlklx95LQr9d5jm4jcOaqlGT0A/1mAAlUq +SDySFGI6DFQLcaZaUd9Yl+1b0Icr0tUiOaQHnF0EZtsQNxIKKwYBBAGXVQEFAQEH +QOTHP4FkopIGJMWXTYsaeQ1Dugd+yNYWB357vRYq6QsiAwEIBwAA/0RIazq1s8Oe +23jvNaZGb/adDYnRrkCMXXTBKsuA6WOAEhKIeAQYFgoAIBYhBKiRTKLxETnGppoA +GPs82bFd5hWWBQJm2xA3AhsMAAoJEPs82bFd5hWWDKABAOl+NoM6FBhKAvckUXDR +MLZ4k778N4Vy9VHbectjRKj1AQCO3JOmON+U6/mjohXrc2bwzKzt2yGiLP2HMxDx +uzMXBQ== +=4XHC +-----END PGP PRIVATE KEY BLOCK----- diff --git a/testdata/simple-directory-provider/openpgp/pubkey.asc b/testdata/simple-directory-provider/openpgp/pubkey.asc new file mode 100644 index 0000000..88cb720 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/pubkey.asc @@ -0,0 +1,13 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mDMEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr60FHNlY3VyaXR5QGV4YW1wbGUuY29tiJkEExYKAEEWIQSokUyi8RE5xqaa +ABj7PNmxXeYVlgUCZtsQNwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIe +BwIXgAAKCRD7PNmxXeYVlmyRAQCDkO22qbpMDsy0vHPtVKWSXH3ktCv13mObiNw5 +qqUZPQD/WYACVSpIPJIUYjoMVAtxplpR31iX7VvQhyvS1SI5pAe4OARm2xA3Egor +BgEEAZdVAQUBAQdA5Mc/gWSikgYkxZdNixp5DUO6B37I1hYHfnu9FirpCyIDAQgH +iHgEGBYKACAWIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZtsQNwIbDAAKCRD7PNmx +XeYVlgygAQDpfjaDOhQYSgL3JFFw0TC2eJO+/DeFcvVR23nLY0So9QEAjtyTpjjf +lOv5o6IV63Nm8Mys7dshoiz9hzMQ8bszFwU= +=rhGT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/testdata/simple-directory-provider/provider-metadata.json b/testdata/simple-directory-provider/provider-metadata.json new file mode 100644 index 0000000..792afd3 --- /dev/null +++ b/testdata/simple-directory-provider/provider-metadata.json @@ -0,0 +1,25 @@ +{ + "canonical_url": "{{.URL}}/provider-metadata.json", + "distributions": [ + { + "directory_url": "{{.URL}}/white/" + } + ], + "last_updated": "2020-01-01T00:00:00Z", + "list_on_CSAF_aggregators": true, + "metadata_version": "2.0", + "mirror_on_CSAF_aggregators": true, + "public_openpgp_keys": [ + { + "fingerprint": "A8914CA2F11139C6A69A0018FB3CD9B15DE61596", + "url": "{{.URL}}/openpgp/pubkey.asc" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc", + "namespace": "https://example.com", + "contact_details": 
"mailto:security@example.com" + }, + "role": "csaf_trusted_provider" +} diff --git a/testdata/simple-directory-provider/security.txt b/testdata/simple-directory-provider/security.txt new file mode 100644 index 0000000..0ae943d --- /dev/null +++ b/testdata/simple-directory-provider/security.txt @@ -0,0 +1,2 @@ +CSAF: /provider-metadata.json + diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json b/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": 
"Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json b/testdata/simple-directory-provider/white/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc 
b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc new file mode 100644 index 0000000..9dff47b --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc @@ -0,0 +1,7 @@ +-----BEGIN PGP SIGNATURE----- + +iHUEABYKAB0WIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZukv9QAKCRD7PNmxXeYV +ljq0AP9n/rTgoNCJzSTZzNrrMy28ZR+Ppp1MSPWGFUzsx6qLJgD/d8cu0lokMsXf +y0uc9k7hrla/ajFUzNt3AVvT+CPFtAo= +=7E66 +-----END PGP SIGNATURE----- diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 new file mode 100644 index 0000000..851b27c --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 @@ -0,0 +1 @@ +cb263bf1beab18b893de63f2966d0d8c5f38d60101c24d3fd7a5feebaad02c3b avendor-advisory-0004.json diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 new file mode 100644 index 0000000..6703550 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 @@ -0,0 +1 @@ +39476e1d08a0871d166091c90de259544382a3599eebda118a93468499a30fd034286086c461a97d3d5298e093b0be3868e8d89d8a6a255c4aa6adb81ebbfcad avendor-advisory-0004.json diff --git a/testdata/simple-directory-provider/white/changes.csv b/testdata/simple-directory-provider/white/changes.csv new file mode 100644 index 0000000..4acdb29 --- /dev/null +++ b/testdata/simple-directory-provider/white/changes.csv @@ -0,0 +1 @@ +"avendor-advisory-0004.json","2020-01-01T00:00:00+00:00" diff --git a/testdata/simple-directory-provider/white/index.html b/testdata/simple-directory-provider/white/index.html new file mode 100644 index 0000000..bcfabd9 --- /dev/null +++ b/testdata/simple-directory-provider/white/index.html @@ -0,0 +1,6 @@ + + + + avendor-advisory-0004 + + diff --git a/testdata/simple-directory-provider/white/index.txt b/testdata/simple-directory-provider/white/index.txt new file mode 100644 index 0000000..d19d30f --- /dev/null +++ b/testdata/simple-directory-provider/white/index.txt @@ -0,0 +1 @@ +avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/openpgp/info.txt b/testdata/simple-rolie-provider/openpgp/info.txt new file mode 100644 index 0000000..3a159f6 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/info.txt @@ -0,0 +1,2 @@ +The GPG key was generated with no passphrase and this command: +`gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --quick-generate-key security@example.com"` diff --git a/testdata/simple-rolie-provider/openpgp/privkey.asc b/testdata/simple-rolie-provider/openpgp/privkey.asc new file mode 100644 index 0000000..816f309 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/privkey.asc @@ -0,0 +1,15 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- + +lFgEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr4AAQD5G5xy/yTN5b+lvV5Ahrbz1qOZ/wmKTieGOH9GZb6JwhHwtBRzZWN1 +cml0eUBleGFtcGxlLmNvbYiZBBMWCgBBFiEEqJFMovEROcammgAY+zzZsV3mFZYF +AmbbEDcCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ+zzZ +sV3mFZZskQEAg5Dttqm6TA7MtLxz7VSlklx95LQr9d5jm4jcOaqlGT0A/1mAAlUq +SDySFGI6DFQLcaZaUd9Yl+1b0Icr0tUiOaQHnF0EZtsQNxIKKwYBBAGXVQEFAQEH +QOTHP4FkopIGJMWXTYsaeQ1Dugd+yNYWB357vRYq6QsiAwEIBwAA/0RIazq1s8Oe +23jvNaZGb/adDYnRrkCMXXTBKsuA6WOAEhKIeAQYFgoAIBYhBKiRTKLxETnGppoA +GPs82bFd5hWWBQJm2xA3AhsMAAoJEPs82bFd5hWWDKABAOl+NoM6FBhKAvckUXDR 
+MLZ4k778N4Vy9VHbectjRKj1AQCO3JOmON+U6/mjohXrc2bwzKzt2yGiLP2HMxDx +uzMXBQ== +=4XHC +-----END PGP PRIVATE KEY BLOCK----- diff --git a/testdata/simple-rolie-provider/openpgp/pubkey.asc b/testdata/simple-rolie-provider/openpgp/pubkey.asc new file mode 100644 index 0000000..88cb720 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/pubkey.asc @@ -0,0 +1,13 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mDMEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr60FHNlY3VyaXR5QGV4YW1wbGUuY29tiJkEExYKAEEWIQSokUyi8RE5xqaa +ABj7PNmxXeYVlgUCZtsQNwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIe +BwIXgAAKCRD7PNmxXeYVlmyRAQCDkO22qbpMDsy0vHPtVKWSXH3ktCv13mObiNw5 +qqUZPQD/WYACVSpIPJIUYjoMVAtxplpR31iX7VvQhyvS1SI5pAe4OARm2xA3Egor +BgEEAZdVAQUBAQdA5Mc/gWSikgYkxZdNixp5DUO6B37I1hYHfnu9FirpCyIDAQgH +iHgEGBYKACAWIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZtsQNwIbDAAKCRD7PNmx +XeYVlgygAQDpfjaDOhQYSgL3JFFw0TC2eJO+/DeFcvVR23nLY0So9QEAjtyTpjjf +lOv5o6IV63Nm8Mys7dshoiz9hzMQ8bszFwU= +=rhGT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/testdata/simple-rolie-provider/provider-metadata.json b/testdata/simple-rolie-provider/provider-metadata.json new file mode 100644 index 0000000..7abb316 --- /dev/null +++ b/testdata/simple-rolie-provider/provider-metadata.json @@ -0,0 +1,33 @@ +{ + "canonical_url": "{{.URL}}/provider-metadata.json", + "distributions": [ + { + "rolie": { + "feeds": [ + { + "summary": "TLP:WHITE advisories", + "tlp_label": "WHITE", + "url": "{{.URL}}/white/white-feed.json" + } + ] + } + } + ], + "last_updated": "2020-01-01T00:00:00Z", + "list_on_CSAF_aggregators": true, + "metadata_version": "2.0", + "mirror_on_CSAF_aggregators": true, + "public_openpgp_keys": [ + { + "fingerprint": "A8914CA2F11139C6A69A0018FB3CD9B15DE61596", + "url": "{{.URL}}/openpgp/pubkey.asc" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc", + "namespace": "https://example.com", + "contact_details": "mailto:security@example.com" + }, + "role": "csaf_trusted_provider" +} diff --git a/testdata/simple-rolie-provider/security.txt b/testdata/simple-rolie-provider/security.txt new file mode 100644 index 0000000..0ae943d --- /dev/null +++ b/testdata/simple-rolie-provider/security.txt @@ -0,0 +1,2 @@ +CSAF: /provider-metadata.json + diff --git a/testdata/simple-rolie-provider/service.json b/testdata/simple-rolie-provider/service.json new file mode 100644 index 0000000..500d882 --- /dev/null +++ b/testdata/simple-rolie-provider/service.json @@ -0,0 +1,23 @@ +{ + "service": { + "workspace": [ + { + "title": "CSAF feeds", + "collection": [ + { + "title": "CSAF feed (TLP:WHITE)", + "href": "/white/white-feed.json", + "categories": { + "category": [ + { + "scheme": "urn:ietf:params:rolie:category:information-type", + "term": "csaf" + } + ] + } + } + ] + } + ] + } +} diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + 
"current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc new file mode 100644 index 0000000..9dff47b --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc @@ -0,0 +1,7 @@ +-----BEGIN PGP SIGNATURE----- + +iHUEABYKAB0WIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZukv9QAKCRD7PNmxXeYV +ljq0AP9n/rTgoNCJzSTZzNrrMy28ZR+Ppp1MSPWGFUzsx6qLJgD/d8cu0lokMsXf +y0uc9k7hrla/ajFUzNt3AVvT+CPFtAo= +=7E66 +-----END PGP SIGNATURE----- diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 new file mode 100644 index 0000000..851b27c --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 @@ -0,0 +1 @@ +cb263bf1beab18b893de63f2966d0d8c5f38d60101c24d3fd7a5feebaad02c3b avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 
b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 new file mode 100644 index 0000000..6703550 --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 @@ -0,0 +1 @@ +39476e1d08a0871d166091c90de259544382a3599eebda118a93468499a30fd034286086c461a97d3d5298e093b0be3868e8d89d8a6a255c4aa6adb81ebbfcad avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/white/white-feed.json b/testdata/simple-rolie-provider/white/white-feed.json new file mode 100644 index 0000000..1bc17bc --- /dev/null +++ b/testdata/simple-rolie-provider/white/white-feed.json @@ -0,0 +1,61 @@ +{ + "feed": { + "id": "csaf-feed-tlp-white", + "title": "CSAF feed (TLP:WHITE)", + "link": [ + { + "rel": "self", + "href": "/white/csaf-feed-tlp-white.json" + }, + { + "rel": "service", + "href": "/service.json" + } + ], + "category": [ + { + "scheme": "urn:ietf:params:rolie:category:information-type", + "term": "csaf" + } + ], + "updated": "2020-01-01T00:00:00Z", + "entry": [ + { + "id": "Avendor-advisory-0004", + "title": "Test CSAF document", + "link": [ + { + "rel": "self", + "href": "/white/avendor-advisory-0004.json" + }, + {{if .EnableSha256}} + { + "rel": "hash", + "href": "/white/avendor-advisory-0004.json.sha256" + }, + {{end}} + {{if .EnableSha512}} + { + "rel": "hash", + "href": "/white/avendor-advisory-0004.json.sha512" + }, + {{end}} + { + "rel": "signature", + "href": "/white/avendor-advisory-0004.json.asc" + } + ], + "published": "2020-01-01T00:00:00Z", + "updated": "2020-01-01T00:00:00Z", + "content": { + "type": "application/json", + "src": "/avendor-advisory-0004.json" + }, + "format": { + "schema": "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", + "version": "2.0" + } + } + ] + } +} From 56509bbb4d868454d01e3b7ce9dffd8bdb658e58 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 27 Nov 2024 12:51:38 +0100 Subject: [PATCH 065/176] Use new path in tests --- cmd/csaf_aggregator/client_test.go | 2 +- cmd/csaf_checker/links_test.go | 2 +- cmd/csaf_downloader/downloader_test.go | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go index c08b29a..fc5b095 100644 --- a/cmd/csaf_aggregator/client_test.go +++ b/cmd/csaf_aggregator/client_test.go @@ -14,7 +14,7 @@ import ( "net/http/httptest" "testing" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func Test_downloadJSON(t *testing.T) { diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index aa04222..6baccf8 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -15,7 +15,7 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) const page0 = ` diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index cf02035..1ae1524 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -19,8 +19,8 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" ) type ProviderParams struct { From b8a98033bf3721bdec6a055dfb07873e2306e512 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 28 Nov 2024 15:58:20 +0100 Subject: [PATCH 066/176] fix docs link to standard --- docs/proxy-provider-for-aggregator.md | 4 +++- 
1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/proxy-provider-for-aggregator.md b/docs/proxy-provider-for-aggregator.md index f34d714..4148f52 100644 --- a/docs/proxy-provider-for-aggregator.md +++ b/docs/proxy-provider-for-aggregator.md @@ -5,7 +5,9 @@ calls it a *CSAF publisher*. After manually downloading the advisories from such a publisher, the tools here can be used to offer the CSAF files for automated downloading -as *CSAF aggregator*. (The construct is called *CSAF proxy provider*. See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#725-role-csaf-aggregator) for more details.) +as *CSAF aggregator*. (The construct is called *CSAF proxy provider*. +See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html#725-role-csaf-aggregator) +for more details.) There are three necessary steps, easiest is to use one single virtual maschine (or container) per internal provider. From 1daaed2c516d3fd674eb99c39dfc5f87ba43f78a Mon Sep 17 00:00:00 2001 From: ncsc-ie-devs <112564016+ncsc-ie-devs@users.noreply.github.com> Date: Mon, 2 Dec 2024 10:42:54 +0000 Subject: [PATCH 067/176] ensure HTTP requests use proxy env vars (#597) * fix: ensure HTTP requests use proxy env vars Updated all instances of `http.Transport` to include the `Proxy` field set to `http.ProxyFromEnvironment`. This ensures that the application respects proxy configuration defined by the `HTTP_PROXY`, `HTTPS_PROXY`, and `NO_PROXY` environment variables. ### Changes: - Modified `http.Transport` initialization across the codebase to use: ```go Proxy: http.ProxyFromEnvironment ``` - Ensured TLS configurations remain intact by preserving `TLSClientConfig`. ### Why: - Previously, HTTP requests bypassed proxy settings due to missing configuration in the transport layer. - This fix enables compatibility with proxied environments, aligning with standard Go behavior. ### Impact: - All HTTP and HTTPS traffic now adheres to proxy settings. - Domains listed in `NO_PROXY` bypass the proxy as expected. ### Verification: - Tested with proxy environment variables set (`HTTP_PROXY`, `HTTPS_PROXY`). - Verified requests route through the proxy and `NO_PROXY` works as intended. 
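
For illustration only (not part of this change): a minimal, self-contained sketch showing how `http.ProxyFromEnvironment` resolves the proxy for a given request URL under the current `HTTP_PROXY`/`HTTPS_PROXY`/`NO_PROXY` settings. The URL below is just an example value, not one used by the tools.

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Build a request for the URL we want to check; only the URL matters here.
	req, err := http.NewRequest(http.MethodGet, "https://example.com/.well-known/csaf/provider-metadata.json", nil)
	if err != nil {
		panic(err)
	}
	// ProxyFromEnvironment consults HTTP_PROXY/HTTPS_PROXY/NO_PROXY (and their
	// lowercase variants) and returns the proxy URL to use, or nil for a
	// direct connection.
	proxyURL, err := http.ProxyFromEnvironment(req)
	if err != nil {
		panic(err)
	}
	fmt.Println("proxy for request:", proxyURL) // <nil> means a direct connection
}
```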
* reformat with fmt --------- Co-authored-by: Cormac Doherty --- cmd/csaf_aggregator/config.go | 1 + cmd/csaf_checker/processor.go | 2 ++ cmd/csaf_downloader/downloader.go | 1 + cmd/csaf_downloader/forwarder.go | 1 + cmd/csaf_uploader/processor.go | 1 + 5 files changed, 6 insertions(+) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 81db0b7..3c2c46b 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -284,6 +284,7 @@ func (c *config) httpClient(p *provider) util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 5fd3fbd..5d1b69b 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -430,6 +430,7 @@ func (p *processor) fullClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) @@ -460,6 +461,7 @@ func (p *processor) basicClient() *http.Client { if p.cfg.Insecure { tr := &http.Transport{ TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + Proxy: http.ProxyFromEnvironment, } return &http.Client{Transport: tr} } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index f21fcc0..b7e7342 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -121,6 +121,7 @@ func (d *downloader) httpClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 12d9fe4..1598283 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -106,6 +106,7 @@ func (f *forwarder) httpClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index b57cafb..f655e02 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -51,6 +51,7 @@ func (p *processor) httpClient() *http.Client { client.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } return &client From 57953e495f10c26312a05eec3d1e7acb2a40e363 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 13:23:57 +0100 Subject: [PATCH 068/176] Warn if no remote validator was specified --- cmd/csaf_validator/main.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index b07c2f4..6985509 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -69,6 +69,8 @@ func run(opts *options, files []string) error { "preparing remote validator failed: %w", err) } defer validator.Close() + } else { + log.Printf("warn: no remote validator specified") } // Select amount level of output for remote validation. 
From 938ceb872ac4b5460379c86b89b6ca0db6ed72f2 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 13:53:56 +0100 Subject: [PATCH 069/176] Return exit code based on validation result --- cmd/csaf_validator/main.go | 13 +++++++++++++ docs/csaf_validator.md | 7 +++++++ 2 files changed, 20 insertions(+) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 6985509..4a9e827 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -22,6 +22,13 @@ import ( "github.com/gocsaf/csaf/v3/util" ) +const ( + exitCodeAllValid = 0 + exitCodeSchemaInvalid = 1 << 0 + exitCodeNoRemoteValidator = 1 << 1 + exitCodeFailedRemoteValidation = 1 << 2 +) + type options struct { Version bool `long:"version" description:"Display version of the binary"` RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL"` @@ -53,6 +60,7 @@ func main() { // run validates the given files. func run(opts *options, files []string) error { + exitCode := exitCodeAllValid var validator csaf.RemoteValidator eval := util.NewPathEval() @@ -70,6 +78,7 @@ func run(opts *options, files []string) error { } defer validator.Close() } else { + exitCode |= exitCodeNoRemoteValidator log.Printf("warn: no remote validator specified") } @@ -106,6 +115,7 @@ func run(opts *options, files []string) error { } if len(validationErrs) > 0 { + exitCode |= exitCodeSchemaInvalid fmt.Printf("schema validation errors of %q\n", file) for _, vErr := range validationErrs { fmt.Printf(" * %s\n", vErr) @@ -132,12 +142,15 @@ func run(opts *options, files []string) error { if rvr.Valid { passes = "passes" } else { + exitCode |= exitCodeFailedRemoteValidation passes = "does not pass" } fmt.Printf("%q %s remote validation.\n", file, passes) } } + // Exit code is based on validation results + os.Exit(exitCodeAllValid) return nil } diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index dfa0c9a..74dbaaf 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -2,6 +2,13 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. +### Exit codes +If no fatal error occurs the program will exit with the following codes: +- `0`: all valid +- `2⁰`: schema invalid +- `2¹`: no remote validator configured +- `2²`: failure in remote validation + ### Usage ``` From 16e86051c5d1b0912a179eb2b30ba568da4e81ce Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 4 Dec 2024 14:27:24 +0100 Subject: [PATCH 070/176] Be more precise about exit codes. --- cmd/csaf_validator/main.go | 8 ++++---- docs/csaf_validator.md | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 4a9e827..9e844b7 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -23,10 +23,10 @@ import ( ) const ( - exitCodeAllValid = 0 - exitCodeSchemaInvalid = 1 << 0 - exitCodeNoRemoteValidator = 1 << 1 - exitCodeFailedRemoteValidation = 1 << 2 + exitCodeSchemaInvalid = 1 << iota + exitCodeNoRemoteValidator + exitCodeFailedRemoteValidation + exitCodeAllValid = 0 ) type options struct { diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 74dbaaf..64ded6d 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -3,11 +3,11 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. 
### Exit codes -If no fatal error occurs the program will exit with the following codes: -- `0`: all valid -- `2⁰`: schema invalid -- `2¹`: no remote validator configured -- `2²`: failure in remote validation +If no fatal error occurs the program will exit with an exit code `n` with the following conditions: +- `n == 0`: all valid +- `(n / 2) % 1 == 1`: schema validation failed +- `(n / 4) % 1 == 1`: no remote validator configured +- `(n / 8) % 1 == 1`: failure in remote validation ### Usage From a51964be3f6a9360ed0c4e05ccc5bcc8418d0f7e Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 16:02:03 +0100 Subject: [PATCH 071/176] Add initial csaf_checker provider test --- cmd/csaf_checker/processor_test.go | 103 +++++++++++++++++++++++++ cmd/csaf_downloader/downloader_test.go | 62 +-------------- internal/testutil/testutil.go | 73 ++++++++++++++++++ 3 files changed, 179 insertions(+), 59 deletions(-) create mode 100644 cmd/csaf_checker/processor_test.go create mode 100644 internal/testutil/testutil.go diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go new file mode 100644 index 0000000..b8b1b1f --- /dev/null +++ b/cmd/csaf_checker/processor_test.go @@ -0,0 +1,103 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package main + +import ( + "net/http/httptest" + "testing" + + "github.com/gocsaf/csaf/v3/internal/testutil" + "github.com/gocsaf/csaf/v3/util" +) + +func TestShaMarking(t *testing.T) { + tests := []struct { + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + }{ + { + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + }, + { + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, + }, + { + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, + }, + { + name: "only deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, + }, + { + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, + }, + { + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + params := testutil.ProviderParams{ + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + } + server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + cfg := config{} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + p, err := newProcessor(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + p.client = client + + // TODO check result of processor + _, err = p.run([]string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) + } + p.close() + }) + } +} diff 
--git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index 1ae1524..d7eaae3 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -11,72 +11,16 @@ package main import ( "context" "errors" - "html/template" "log/slog" - "net/http" "net/http/httptest" "os" - "strings" "testing" "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) -type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool -} - -func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - path := "../../testdata/" - if directoryProvider { - path += "simple-directory-provider" - } else { - path += "simple-rolie-provider" - } - - path += r.URL.Path - - if strings.HasSuffix(r.URL.Path, "/") { - path += "index.html" - } - - content, err := os.ReadFile(path) - if err != nil { - w.WriteHeader(http.StatusNotFound) - return - } - switch { - case strings.HasSuffix(path, ".html"): - w.Header().Add("Content-Type", "text/html") - case strings.HasSuffix(path, ".json"): - w.Header().Add("Content-Type", "application/json") - case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: - w.WriteHeader(http.StatusNotFound) - return - case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: - w.WriteHeader(http.StatusNotFound) - return - default: - w.Header().Add("Content-Type", "text/plain") - } - - tmplt, err := template.New("base").Parse(string(content)) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - return - } - err = tmplt.Execute(w, params) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - return - } - }) -} - func checkIfFileExists(path string, t *testing.T) bool { if _, err := os.Stat(path); err == nil { return true @@ -169,12 +113,12 @@ func TestShaMarking(t *testing.T) { t.Run(test.name, func(tt *testing.T) { tt.Parallel() serverURL := "" - params := ProviderParams{ + params := testutil.ProviderParams{ URL: "", EnableSha256: test.enableSha256, EnableSha512: test.enableSha512, } - server := httptest.NewTLSServer(ProviderHandler(¶ms, test.directoryProvider)) + server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() serverURL = server.URL diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go new file mode 100644 index 0000000..455d217 --- /dev/null +++ b/internal/testutil/testutil.go @@ -0,0 +1,73 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +// Package testutil contains shared helper functions for testing the application. +package testutil + +import ( + "html/template" + "net/http" + "os" + "strings" +) + +// ProviderParams configures the test provider. +type ProviderParams struct { + URL string + EnableSha256 bool + EnableSha512 bool +} + +// ProviderHandler returns a test provider handler with the specified configuration. 
+func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + path := "../../testdata/" + if directoryProvider { + path += "simple-directory-provider" + } else { + path += "simple-rolie-provider" + } + + path += r.URL.Path + + if strings.HasSuffix(r.URL.Path, "/") { + path += "index.html" + } + + content, err := os.ReadFile(path) + if err != nil { + w.WriteHeader(http.StatusNotFound) + return + } + switch { + case strings.HasSuffix(path, ".html"): + w.Header().Add("Content-Type", "text/html") + case strings.HasSuffix(path, ".json"): + w.Header().Add("Content-Type", "application/json") + case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: + w.WriteHeader(http.StatusNotFound) + return + case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: + w.WriteHeader(http.StatusNotFound) + return + default: + w.Header().Add("Content-Type", "text/plain") + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + err = tmplt.Execute(w, params) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + } +} From 5b6af7a4ad26bb53795e94fe3576a636b0b81df1 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 17:52:00 +0100 Subject: [PATCH 072/176] WIP: Add requirement tests --- cmd/csaf_checker/processor_test.go | 106 +++++++++++++++++- testdata/simple-rolie-provider/service.json | 2 +- .../white/white-feed.json | 14 +-- 3 files changed, 112 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index b8b1b1f..73574bd 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -9,55 +9,150 @@ package main import ( + "fmt" "net/http/httptest" + "reflect" "testing" "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) +func getBaseRequirements(url string) []Requirement { + return []Requirement{ + { + Num: 1, + Description: "Valid CSAF documents", + Messages: []Message{{Type: 1, Text: "No remote validator configured"}, {Type: 0, Text: "All advisories validated fine against the schema."}}, + }, { + Num: 2, + Description: "Filename", + Messages: []Message{{Type: 0, Text: "All found filenames are conforming."}}}, + { + Num: 3, + Description: "TLS", + Messages: []Message{{Type: 0, Text: "All tested URLs were HTTPS."}}}, + { + Num: 4, + Description: "TLP:WHITE", + Messages: []Message{{Type: 0, Text: "All advisories labeled TLP:WHITE were freely accessible."}}}, + { + Num: 5, + Description: "TLP:AMBER and TLP:RED", + Messages: []Message{ + {Type: 0, Text: "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility."}}}, + { + Num: 6, + Description: "Redirects", + Messages: []Message{{Type: 0, Text: "No redirections found."}}}, + { + Num: 7, + Description: "provider-metadata.json", + Messages: []Message{{Type: 0, Text: "Found good provider metadata."}}}, + { + Num: 8, + Description: "security.txt", + Messages: []Message{{Type: 0, Text: "Performed no test of security.txt since the direct url of the provider-metadata.json was used."}}}, + { + Num: 9, + Description: "/.well-known/csaf/provider-metadata.json", + Messages: []Message{{Type: 0, Text: "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used."}}}, + { + Num: 10, + 
Description: "DNS path", + Messages: []Message{{Type: 0, Text: "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used."}}}, + { + Num: 11, + Description: "One folder per year", + Messages: []Message{{Type: 2, Text: fmt.Sprintf("No year folder found in %s/white/avendor-advisory-0004.json", url)}}}, + { + Num: 12, + Description: "index.txt", + Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/index.txt", url)}}}, + { + Num: 13, + Description: "changes.csv", + Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/changes.csv", url)}}}, + { + Num: 14, + Description: "Directory listings", + Messages: []Message{{Type: 0, Text: "All directory listings are valid."}}}, + { + Num: 15, + Description: "ROLIE feed", + Messages: []Message{{Type: 2, Text: "ROLIE feed based distribution was not used."}}}, + { + Num: 16, + Description: "ROLIE service document", + Messages: []Message{{Type: 1, Text: "No ROLIE service document found."}}}, + { + Num: 17, + Description: "ROLIE category document", + Messages: []Message{{Type: 1, Text: "No ROLIE category document found."}}}, + { + Num: 18, + Description: "Integrity", + Messages: []Message{{Type: 0, Text: "All checksums match."}}}, + { + Num: 19, + Description: "Signatures", + Messages: []Message{{Type: 0, Text: "All signatures verified."}}}, + { + Num: 20, + Description: "Public OpenPGP Key", + Messages: []Message{{Type: 0, Text: "1 public OpenPGP key(s) loaded."}}}, + } +} + func TestShaMarking(t *testing.T) { tests := []struct { name string directoryProvider bool enableSha256 bool enableSha512 bool + expected func(string) []Requirement }{ { name: "deliver sha256 and sha512", directoryProvider: false, enableSha256: true, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256", directoryProvider: false, enableSha256: true, enableSha512: false, + expected: getBaseRequirements, }, { name: "only deliver sha512", directoryProvider: false, enableSha256: false, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256 and sha512, directory provider", directoryProvider: true, enableSha256: true, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256, directory provider", directoryProvider: true, enableSha256: true, enableSha512: false, + expected: getBaseRequirements, }, { name: "only deliver sha512, directory provider", directoryProvider: true, enableSha256: false, enableSha512: true, + expected: getBaseRequirements, }, } @@ -92,11 +187,18 @@ func TestShaMarking(t *testing.T) { } p.client = client - // TODO check result of processor - _, err = p.run([]string{serverURL + "/provider-metadata.json"}) + report, err := p.run([]string{serverURL + "/provider-metadata.json"}) if err != nil { t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) } + expected := test.expected(serverURL) + for i, got := range report.Domains[0].Requirements { + want := expected[i] + if !reflect.DeepEqual(*got, want) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, want, *got) + } + } + p.close() }) } diff --git a/testdata/simple-rolie-provider/service.json b/testdata/simple-rolie-provider/service.json index 500d882..a398a40 100644 --- a/testdata/simple-rolie-provider/service.json +++ b/testdata/simple-rolie-provider/service.json @@ -6,7 +6,7 @@ "collection": [ { "title": "CSAF feed (TLP:WHITE)", - "href": "/white/white-feed.json", + "href": "{{.URL}}/white/white-feed.json", 
"categories": { "category": [ { diff --git a/testdata/simple-rolie-provider/white/white-feed.json b/testdata/simple-rolie-provider/white/white-feed.json index 1bc17bc..923a492 100644 --- a/testdata/simple-rolie-provider/white/white-feed.json +++ b/testdata/simple-rolie-provider/white/white-feed.json @@ -5,11 +5,11 @@ "link": [ { "rel": "self", - "href": "/white/csaf-feed-tlp-white.json" + "href": "{{.URL}}/white/csaf-feed-tlp-white.json" }, { "rel": "service", - "href": "/service.json" + "href": "{{.URL}}/service.json" } ], "category": [ @@ -26,30 +26,30 @@ "link": [ { "rel": "self", - "href": "/white/avendor-advisory-0004.json" + "href": "{{.URL}}/white/avendor-advisory-0004.json" }, {{if .EnableSha256}} { "rel": "hash", - "href": "/white/avendor-advisory-0004.json.sha256" + "href": "{{.URL}}/white/avendor-advisory-0004.json.sha256" }, {{end}} {{if .EnableSha512}} { "rel": "hash", - "href": "/white/avendor-advisory-0004.json.sha512" + "href": "{{.URL}}/white/avendor-advisory-0004.json.sha512" }, {{end}} { "rel": "signature", - "href": "/white/avendor-advisory-0004.json.asc" + "href": "{{.URL}}/white/avendor-advisory-0004.json.asc" } ], "published": "2020-01-01T00:00:00Z", "updated": "2020-01-01T00:00:00Z", "content": { "type": "application/json", - "src": "/avendor-advisory-0004.json" + "src": "{{.URL}}/avendor-advisory-0004.json" }, "format": { "schema": "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", From 68bd04676cc425dca87751bca989457baf5f56a1 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Dec 2024 13:11:07 +0100 Subject: [PATCH 073/176] Add requirement checker test data --- cmd/csaf_checker/processor_test.go | 147 +++++------- .../sha256-directory.json | 206 +++++++++++++++++ .../processor-requirements/sha256-rolie.json | 210 ++++++++++++++++++ .../sha256-sha512-directory.json | 206 +++++++++++++++++ .../sha256-sha512-rolie.json | 210 ++++++++++++++++++ .../sha512-directory.json | 207 +++++++++++++++++ .../processor-requirements/sha512-rolie.json | 210 ++++++++++++++++++ 7 files changed, 1299 insertions(+), 97 deletions(-) create mode 100644 testdata/processor-requirements/sha256-directory.json create mode 100644 testdata/processor-requirements/sha256-rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-directory.json create mode 100644 testdata/processor-requirements/sha256-sha512-rolie.json create mode 100644 testdata/processor-requirements/sha512-directory.json create mode 100644 testdata/processor-requirements/sha512-rolie.json diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 73574bd..c4fb532 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -9,99 +9,54 @@ package main import ( - "fmt" + "bytes" + "encoding/json" "net/http/httptest" + "os" "reflect" "testing" + "text/template" "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) -func getBaseRequirements(url string) []Requirement { - return []Requirement{ - { - Num: 1, - Description: "Valid CSAF documents", - Messages: []Message{{Type: 1, Text: "No remote validator configured"}, {Type: 0, Text: "All advisories validated fine against the schema."}}, - }, { - Num: 2, - Description: "Filename", - Messages: []Message{{Type: 0, Text: "All found filenames are conforming."}}}, - { - Num: 3, - Description: "TLS", - Messages: []Message{{Type: 0, Text: "All tested URLs were HTTPS."}}}, - { - Num: 4, - Description: "TLP:WHITE", - Messages: []Message{{Type: 0, Text: "All advisories 
labeled TLP:WHITE were freely accessible."}}}, - { - Num: 5, - Description: "TLP:AMBER and TLP:RED", - Messages: []Message{ - {Type: 0, Text: "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility."}}}, - { - Num: 6, - Description: "Redirects", - Messages: []Message{{Type: 0, Text: "No redirections found."}}}, - { - Num: 7, - Description: "provider-metadata.json", - Messages: []Message{{Type: 0, Text: "Found good provider metadata."}}}, - { - Num: 8, - Description: "security.txt", - Messages: []Message{{Type: 0, Text: "Performed no test of security.txt since the direct url of the provider-metadata.json was used."}}}, - { - Num: 9, - Description: "/.well-known/csaf/provider-metadata.json", - Messages: []Message{{Type: 0, Text: "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used."}}}, - { - Num: 10, - Description: "DNS path", - Messages: []Message{{Type: 0, Text: "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used."}}}, - { - Num: 11, - Description: "One folder per year", - Messages: []Message{{Type: 2, Text: fmt.Sprintf("No year folder found in %s/white/avendor-advisory-0004.json", url)}}}, - { - Num: 12, - Description: "index.txt", - Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/index.txt", url)}}}, - { - Num: 13, - Description: "changes.csv", - Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/changes.csv", url)}}}, - { - Num: 14, - Description: "Directory listings", - Messages: []Message{{Type: 0, Text: "All directory listings are valid."}}}, - { - Num: 15, - Description: "ROLIE feed", - Messages: []Message{{Type: 2, Text: "ROLIE feed based distribution was not used."}}}, - { - Num: 16, - Description: "ROLIE service document", - Messages: []Message{{Type: 1, Text: "No ROLIE service document found."}}}, - { - Num: 17, - Description: "ROLIE category document", - Messages: []Message{{Type: 1, Text: "No ROLIE category document found."}}}, - { - Num: 18, - Description: "Integrity", - Messages: []Message{{Type: 0, Text: "All checksums match."}}}, - { - Num: 19, - Description: "Signatures", - Messages: []Message{{Type: 0, Text: "All signatures verified."}}}, - { - Num: 20, - Description: "Public OpenPGP Key", - Messages: []Message{{Type: 0, Text: "1 public OpenPGP key(s) loaded."}}}, +func getRequirementTestData(t *testing.T, params testutil.ProviderParams, directoryProvider bool) []Requirement { + path := "../../testdata/processor-requirements/" + if params.EnableSha256 { + path += "sha256-" } + if params.EnableSha512 { + path += "sha512-" + } + if directoryProvider { + path += "directory" + } else { + path += "rolie" + } + path += ".json" + + content, err := os.ReadFile(path) + if err != nil { + t.Fatal(err) + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + t.Fatal(err) + } + + var output bytes.Buffer + err = tmplt.Execute(&output, params) + if err != nil { + t.Fatal(err) + } + var requirement []Requirement + err = json.Unmarshal(output.Bytes(), &requirement) + if err != nil { + t.Fatal(err) + } + return requirement } func TestShaMarking(t *testing.T) { @@ -110,49 +65,42 @@ func TestShaMarking(t *testing.T) { directoryProvider bool enableSha256 bool enableSha512 bool - expected func(string) []Requirement }{ { name: "deliver sha256 and sha512", directoryProvider: false, enableSha256: true, enableSha512: true, - expected: 
getBaseRequirements, }, { name: "only deliver sha256", directoryProvider: false, enableSha256: true, enableSha512: false, - expected: getBaseRequirements, }, { name: "only deliver sha512", directoryProvider: false, enableSha256: false, enableSha512: true, - expected: getBaseRequirements, }, { - name: "only deliver sha256 and sha512, directory provider", + name: "deliver sha256 and sha512, directory provider", directoryProvider: true, enableSha256: true, enableSha512: true, - expected: getBaseRequirements, }, { name: "only deliver sha256, directory provider", directoryProvider: true, enableSha256: true, enableSha512: false, - expected: getBaseRequirements, }, { name: "only deliver sha512, directory provider", directoryProvider: true, enableSha256: false, enableSha512: true, - expected: getBaseRequirements, }, } @@ -191,11 +139,16 @@ func TestShaMarking(t *testing.T) { if err != nil { t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) } - expected := test.expected(serverURL) - for i, got := range report.Domains[0].Requirements { - want := expected[i] - if !reflect.DeepEqual(*got, want) { - t.Errorf("SHA marking %v: Expected %v, got %v", test.name, want, *got) + expected := getRequirementTestData(t, + testutil.ProviderParams{ + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + }, + test.directoryProvider) + for i, want := range report.Domains[0].Requirements { + if !reflect.DeepEqual(expected[i], *want) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, *want, expected[i]) } } diff --git a/testdata/processor-requirements/sha256-directory.json b/testdata/processor-requirements/sha256-directory.json new file mode 100644 index 0000000..a106977 --- /dev/null +++ b/testdata/processor-requirements/sha256-directory.json @@ -0,0 +1,206 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] \ No newline at end of file diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha256-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-directory.json b/testdata/processor-requirements/sha256-sha512-directory.json new file mode 100644 index 0000000..3e30b9a --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-directory.json @@ -0,0 +1,206 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." 
+ } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." 
+ } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha512-directory.json b/testdata/processor-requirements/sha512-directory.json new file mode 100644 index 0000000..e47e1f9 --- /dev/null +++ b/testdata/processor-requirements/sha512-directory.json @@ -0,0 +1,207 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." 
+ } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] + diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." 
+ } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] From df65ad13cbd222d2a2b1784287bd9e2e8b22ba7b Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 10 Dec 2024 10:13:42 +0100 Subject: [PATCH 074/176] Fix: return correct exit code --- cmd/csaf_validator/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 9e844b7..1a34be0 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -150,7 +150,7 @@ func run(opts *options, files []string) error { } // Exit code is based on validation results - os.Exit(exitCodeAllValid) + os.Exit(exitCode) return nil } From fc404e499c90ead7643bbbbba4b75855bdbfe938 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 13 Dec 2024 13:33:22 +0100 Subject: [PATCH 075/176] Unfix: Add should-states --- testdata/processor-requirements/sha256-directory.json | 2 +- testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-directory.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/testdata/processor-requirements/sha256-directory.json b/testdata/processor-requirements/sha256-directory.json index a106977..46b4049 100644 --- a/testdata/processor-requirements/sha256-directory.json +++ b/testdata/processor-requirements/sha256-directory.json @@ -178,7 +178,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 0, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 5875174..61c5ccf 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 1, + "type": 0, "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-directory.json b/testdata/processor-requirements/sha512-directory.json index e47e1f9..5102fab 100644 --- a/testdata/processor-requirements/sha512-directory.json +++ b/testdata/processor-requirements/sha512-directory.json @@ -178,7 +178,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 0, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 5875174..61c5ccf 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 1, + "type": 0, "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" } ] From a3d6d6acfb3fed53967ae8c024ddc2b565bd284b Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 14:26:00 +0100 Subject: [PATCH 076/176] Downgrade error to info in directory hash fetching --- cmd/csaf_checker/processor.go | 9 +++++++-- cmd/csaf_checker/processor_test.go | 6 +++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7972e2b..eed561a 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -776,8 +776,13 @@ func (p *processor) integrity( continue } if res.StatusCode != http.StatusOK { - p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) + if f.IsDirectory() { + p.badIntegrities.info("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status) + } else { + p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status) + } continue } h, err := func() ([]byte, error) { diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index c4fb532..ea5aed5 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -146,9 +146,9 @@ func TestShaMarking(t *testing.T) { EnableSha512: test.enableSha512, }, test.directoryProvider) - for i, want := range report.Domains[0].Requirements { - if !reflect.DeepEqual(expected[i], *want) { - t.Errorf("SHA marking %v: Expected %v, got %v", test.name, *want, expected[i]) + for i, got := range report.Domains[0].Requirements { + if !reflect.DeepEqual(expected[i], *got) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, expected[i], *got) } } From ebd96011fcfd38a6a6c8c82ab2a9e99d8aee3f8c Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 14:38:49 +0100 Subject: [PATCH 077/176] Revert new requirement 17 test Changing the ROLIE category fetching warning to info can be addressed later. --- testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 61c5ccf..5875174 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 0, + "type": 1, "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 61c5ccf..5875174 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 0, + "type": 1, "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" } ] From 9dd4b7fc8dca06e7eb87e54da60680fd4f8a6b41 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 15:54:39 +0100 Subject: [PATCH 078/176] Add tests for no hash given or available --- cmd/csaf_checker/processor.go | 10 +- cmd/csaf_checker/processor_test.go | 101 ++++++--- internal/testutil/testutil.go | 10 +- .../processor-requirements/directory.json | 210 +++++++++++++++++ testdata/processor-requirements/rolie.json | 210 +++++++++++++++++ ...256-sha512-forbid-hash-fetching-rolie.json | 214 ++++++++++++++++++ 6 files changed, 711 insertions(+), 44 deletions(-) create mode 100644 testdata/processor-requirements/directory.json create mode 100644 testdata/processor-requirements/rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index eed561a..b913864 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -20,7 +20,6 @@ import ( "fmt" "io" "log" - "log/slog" "net/http" "net/url" "path/filepath" @@ -586,14 +585,11 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { switch { case sha256 == "" && sha512 == "": - slog.Error("No hash listed on ROLIE feed", "file", url) - return + p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url) case sign == "": - slog.Error("No signature listed on ROLIE feed", "file", url) - return - default: - file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} + p.badROLIEFeed.error("No signature listed on ROLIE feed %s", url) } + file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} files = append(files, file) }) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index ea5aed5..5b0241e 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -29,6 +29,9 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct if params.EnableSha512 { path += "sha512-" } + if params.ForbidHashFetching { + path += "forbid-hash-fetching-" + } if directoryProvider { path += "directory" } else { @@ -61,46 +64,74 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct func TestShaMarking(t *testing.T) { tests := []struct { - name string - directoryProvider bool - enableSha256 bool - enableSha512 bool + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + forbidHashFetching bool }{ { - name: "deliver sha256 and sha512", - directoryProvider: false, - enableSha256: true, - enableSha512: true, + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha256", - directoryProvider: false, - enableSha256: true, - enableSha512: false, + name: "enable sha256 and sha512, forbid fetching", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidHashFetching: true, }, { - name: "only deliver sha512", - directoryProvider: false, - enableSha256: false, - enableSha512: true, + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, + forbidHashFetching: false, }, { - name: "deliver sha256 and sha512, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: true, + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: 
true, + forbidHashFetching: false, }, { - name: "only deliver sha256, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: false, + name: "deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha512, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: true, + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, + forbidHashFetching: false, + }, + { + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, + forbidHashFetching: false, + }, + { + name: "no hash", + directoryProvider: false, + enableSha256: false, + enableSha512: false, + forbidHashFetching: false, + }, + { + name: "no hash, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: false, + forbidHashFetching: false, }, } @@ -111,9 +142,10 @@ func TestShaMarking(t *testing.T) { tt.Parallel() serverURL := "" params := testutil.ProviderParams{ - URL: "", - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidHashFetching: test.forbidHashFetching, } server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() @@ -141,9 +173,10 @@ func TestShaMarking(t *testing.T) { } expected := getRequirementTestData(t, testutil.ProviderParams{ - URL: serverURL, - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidHashFetching: test.forbidHashFetching, }, test.directoryProvider) for i, got := range report.Domains[0].Requirements { diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index 455d217..e933742 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,9 +18,10 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidHashFetching bool } // ProviderHandler returns a test provider handler with the specified configuration. @@ -49,6 +50,9 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): w.Header().Add("Content-Type", "application/json") + case (strings.HasSuffix(path, ".sha256") || strings.HasSuffix(path, ".sha512")) && params.ForbidHashFetching: + w.WriteHeader(http.StatusForbidden) + return case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: w.WriteHeader(http.StatusNotFound) return diff --git a/testdata/processor-requirements/directory.json b/testdata/processor-requirements/directory.json new file mode 100644 index 0000000..ed61fcc --- /dev/null +++ b/testdata/processor-requirements/directory.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." 
+ } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" + }, + { + "type": 0, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] \ No newline at end of file diff --git a/testdata/processor-requirements/rolie.json b/testdata/processor-requirements/rolie.json new file mode 100644 index 0000000..cd65a7e --- /dev/null +++ b/testdata/processor-requirements/rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "No hash listed on ROLIE feed {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json new file mode 100644 index 0000000..03359f0 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json @@ -0,0 +1,214 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." 
+ } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" + }, + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] From b1a76207636a7c312c94344b44546116f31c5641 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 12:23:10 +0100 Subject: [PATCH 079/176] Extend processor SHA fetching tests Allow to forbid individual hashes from downloading. This allows to for testing the behavior, if one of the hashes could not be downloaded. --- cmd/csaf_checker/processor_test.go | 119 +++++++++--------- internal/testutil/testutil.go | 14 ++- ...12-forbid-sha256-forbid-sha512-rolie.json} | 0 3 files changed, 68 insertions(+), 65 deletions(-) rename testdata/processor-requirements/{sha256-sha512-forbid-hash-fetching-rolie.json => sha256-sha512-forbid-sha256-forbid-sha512-rolie.json} (100%) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 5b0241e..9e3f112 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -29,8 +29,11 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct if params.EnableSha512 { path += "sha512-" } - if params.ForbidHashFetching { - path += "forbid-hash-fetching-" + if params.ForbidSha256 { + path += "forbid-sha256-" + } + if params.ForbidSha512 { + path += "forbid-sha512-" } if directoryProvider { path += "directory" @@ -64,74 +67,68 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct func TestShaMarking(t *testing.T) { tests := []struct { - name string - directoryProvider bool - enableSha256 bool - enableSha512 bool - forbidHashFetching bool + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + forbidSha256 bool + forbidSha512 bool }{ { - name: "deliver sha256 and sha512", - directoryProvider: false, - enableSha256: true, - enableSha512: true, - forbidHashFetching: false, + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, }, { - name: "enable sha256 and sha512, forbid fetching", - directoryProvider: false, - enableSha256: true, - enableSha512: true, - forbidHashFetching: true, + name: "enable sha256 and sha512, forbid fetching", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: true, + forbidSha512: true, }, { - name: "only deliver sha256", - directoryProvider: false, - enableSha256: true, - enableSha512: false, - forbidHashFetching: false, + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, }, { - name: "only deliver sha512", - directoryProvider: false, - enableSha256: false, - enableSha512: true, - 
forbidHashFetching: false, + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, }, { - name: "deliver sha256 and sha512, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: true, - forbidHashFetching: false, + name: "deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, }, { - name: "only deliver sha256, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: false, - forbidHashFetching: false, + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, }, { - name: "only deliver sha512, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: true, - forbidHashFetching: false, + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, }, { - name: "no hash", - directoryProvider: false, - enableSha256: false, - enableSha512: false, - forbidHashFetching: false, + name: "no hash", + directoryProvider: false, + enableSha256: false, + enableSha512: false, }, { - name: "no hash, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: false, - forbidHashFetching: false, + name: "no hash, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: false, }, } @@ -142,10 +139,11 @@ func TestShaMarking(t *testing.T) { tt.Parallel() serverURL := "" params := testutil.ProviderParams{ - URL: "", - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, - ForbidHashFetching: test.forbidHashFetching, + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidSha256: test.forbidSha256, + ForbidSha512: test.forbidSha512, } server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() @@ -173,10 +171,11 @@ func TestShaMarking(t *testing.T) { } expected := getRequirementTestData(t, testutil.ProviderParams{ - URL: serverURL, - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, - ForbidHashFetching: test.forbidHashFetching, + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidSha256: test.forbidSha256, + ForbidSha512: test.forbidSha512, }, test.directoryProvider) for i, got := range report.Domains[0].Requirements { diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index e933742..c7bad68 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,10 +18,11 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool - ForbidHashFetching bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidSha256 bool + ForbidSha512 bool } // ProviderHandler returns a test provider handler with the specified configuration. 
@@ -50,7 +51,10 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): w.Header().Add("Content-Type", "application/json") - case (strings.HasSuffix(path, ".sha256") || strings.HasSuffix(path, ".sha512")) && params.ForbidHashFetching: + case (strings.HasSuffix(path, ".sha256")) && params.ForbidSha256: + w.WriteHeader(http.StatusForbidden) + return + case strings.HasSuffix(path, ".sha512") && params.ForbidSha512: w.WriteHeader(http.StatusForbidden) return case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: diff --git a/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-forbid-sha512-rolie.json similarity index 100% rename from testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json rename to testdata/processor-requirements/sha256-sha512-forbid-sha256-forbid-sha512-rolie.json From d38150c6a0d334300dfb3391964ea051c66aa4ce Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 12:57:28 +0100 Subject: [PATCH 080/176] Add testdata for individual hash forbidden tests --- cmd/csaf_checker/processor_test.go | 16 ++ .../sha256-sha512-forbid-sha256-rolie.json | 210 ++++++++++++++++++ .../sha256-sha512-forbid-sha512-rolie.json | 210 ++++++++++++++++++ 3 files changed, 436 insertions(+) create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 9e3f112..0710f32 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -88,6 +88,22 @@ func TestShaMarking(t *testing.T) { forbidSha256: true, forbidSha512: true, }, + { + name: "enable sha256 and sha512, forbid sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: true, + forbidSha512: false, + }, + { + name: "enable sha256 and sha512, forbid sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: false, + forbidSha512: true, + }, { name: "only deliver sha256", directoryProvider: false, diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json new file mode 100644 index 0000000..2a1f2a8 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." 
+ } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json new file mode 100644 index 0000000..2a4c98f --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." 
+ } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] From bc5d149f74d2ce5e7ed03316141a31eafbd80ea1 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 19:28:24 +0100 Subject: [PATCH 081/176] Use exit code 1 for general errors, fix documentation --- cmd/csaf_validator/main.go | 2 +- docs/csaf_validator.md | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 1a34be0..346180b 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -23,7 +23,7 @@ import ( ) const ( - exitCodeSchemaInvalid = 1 << iota + exitCodeSchemaInvalid = 2 << iota exitCodeNoRemoteValidator exitCodeFailedRemoteValidation exitCodeAllValid = 0 diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 64ded6d..a0e00bb 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -5,9 +5,10 @@ is a tool to validate local advisories files against the JSON Schema and an opti ### Exit codes If no fatal error occurs the program will exit with an exit code `n` with the following conditions: - `n == 0`: all valid -- `(n / 2) % 1 == 1`: schema validation failed -- `(n / 4) % 1 == 1`: no remote validator configured -- `(n / 8) % 1 == 1`: failure in remote validation +- `(n & 1) > 0`: general error, see logs +- `(n & 2) > 0`: schema validation failed +- `(n & 4) > 0`: no remote validator configured +- `(n & 8) > 0`: failure in remote validation ### Usage From 95ff418a270d618ffc2b6fb661e702cf7639d75f Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 18 Dec 2024 08:55:48 +0100 Subject: [PATCH 082/176] fix: Content-Type header for JSON responses * Remove `charset=utf-8` parameter, which is not allowed for JSON, according to rfc8259. --- cmd/csaf_provider/controller.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_provider/controller.go b/cmd/csaf_provider/controller.go index 7f64fe2..f04b7bd 100644 --- a/cmd/csaf_provider/controller.go +++ b/cmd/csaf_provider/controller.go @@ -174,7 +174,7 @@ func (c *controller) web( // writeJSON sets the header for the response and writes the JSON encoding of the given "content". // It logs out an error message in case of an error. 
func writeJSON(rw http.ResponseWriter, content any, code int) { - rw.Header().Set("Content-type", "application/json; charset=utf-8") + rw.Header().Set("Content-type", "application/json") rw.Header().Set("X-Content-Type-Options", "nosniff") rw.WriteHeader(code) if err := json.NewEncoder(rw).Encode(content); err != nil { From d8e903587a8744b51227da17867505da75a44c41 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 18 Dec 2024 15:37:58 +0100 Subject: [PATCH 083/176] Warn only if the other hash could be fetched --- cmd/csaf_checker/processor.go | 28 +++++++++++++------ .../sha256-sha512-forbid-sha256-rolie.json | 2 +- .../sha256-sha512-forbid-sha512-rolie.json | 2 +- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index b913864..224e225 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -757,6 +757,9 @@ func (p *processor) integrity( hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)}) } + couldFetchHash := false + hashFetchErrors := []string{} + for _, x := range hashes { hu, err := url.Parse(x.url()) if err != nil { @@ -768,19 +771,15 @@ func (p *processor) integrity( p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { - p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err) + hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: %v.", hashFile, err)) continue } if res.StatusCode != http.StatusOK { - if f.IsDirectory() { - p.badIntegrities.info("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) - } else { - p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) - } + hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status)) continue } + couldFetchHash = true h, err := func() ([]byte, error) { defer res.Body.Close() return util.HashFromReader(res.Body) @@ -798,6 +797,19 @@ func (p *processor) integrity( x.ext, u, hashFile) } } + + msgType := ErrorType + // Log only as warning, if the other hash could be fetched + if couldFetchHash { + msgType = WarnType + } + if f.IsDirectory() { + msgType = InfoType + } + for _, fetchError := range hashFetchErrors { + p.badIntegrities.add(msgType, fetchError) + } + // Check signature su, err := url.Parse(f.SignURL()) if err != nil { diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json index 2a1f2a8..72a173a 100644 --- a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json @@ -182,7 +182,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 1, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" } ] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json index 2a4c98f..1ab8f1e 100644 --- a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json @@ -182,7 +182,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 1, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" } ] From 
8fc7f5bfad0c6022cbcc07cec36b875cb4ad292e Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 7 Jan 2025 12:23:40 +0100 Subject: [PATCH 084/176] Make documentation more explicit --- docs/csaf_validator.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index a0e00bb..87ec831 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -3,9 +3,11 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. ### Exit codes + If no fatal error occurs the program will exit with an exit code `n` with the following conditions: + - `n == 0`: all valid -- `(n & 1) > 0`: general error, see logs +- `(n & 1) > 0`: a general error occurred, all other flags are unset (see logs for more information) - `(n & 2) > 0`: schema validation failed - `(n & 4) > 0`: no remote validator configured - `(n & 8) > 0`: failure in remote validation From b8a5fa72d5d164b5996ec068de9c5e5e9bac15c5 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 8 Jan 2025 08:49:42 +0100 Subject: [PATCH 085/176] Fix nil check in downloader --- cmd/csaf_downloader/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index c8d92c1..ba6ccff 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -133,7 +133,7 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) // Overwrite for testing purposes - if client != nil { + if d.client != nil { client = *d.client } From 9275a37a9faa07943b326ebded09559ef36a1084 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 8 Jan 2025 08:50:30 +0100 Subject: [PATCH 086/176] Format --- cmd/csaf_downloader/downloader.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index ba6ccff..88a63c2 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -514,7 +514,8 @@ nextAdvisory: url: file.SHA512URL(), warn: true, hashType: algSha512, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512))}) + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)), + }) } else { slog.Info("SHA512 not present") } @@ -523,7 +524,8 @@ nextAdvisory: url: file.SHA256URL(), warn: true, hashType: algSha256, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256))}) + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)), + }) } else { slog.Info("SHA256 not present") } From b6721e1d5ad3b2f4f4d6d37501a4b74cd665a2bd Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 10 Jan 2025 11:42:54 +0100 Subject: [PATCH 087/176] Add check for missing either sha256 or sha512 hashes only --- cmd/csaf_checker/processor.go | 4 ++++ testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 224e225..5c4f66e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -584,6 +584,10 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var file csaf.AdvisoryFile switch { + case sha256 == "" && sha512 != "": + p.badROLIEFeed.info("%s has no sha256 hash file listed", url) + case sha256 != "" && sha512 == "": + p.badROLIEFeed.info("%s has no 
sha512 hash file listed", url) case sha256 == "" && sha512 == "": p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url) case sign == "": diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 5875174..4ed47f1 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -153,7 +153,7 @@ "messages": [ { "type": 0, - "text": "All checked ROLIE feeds validated fine." + "text": "{{.URL}}/white/avendor-advisory-0004.json has no sha512 hash file listed" } ] }, diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 5875174..a2a195d 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -153,7 +153,7 @@ "messages": [ { "type": 0, - "text": "All checked ROLIE feeds validated fine." + "text": "{{.URL}}/white/avendor-advisory-0004.json has no sha256 hash file listed" } ] }, From 028f468d6f25f2e47d96fb1a5d924d3e22ab5949 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 10:32:13 +0100 Subject: [PATCH 088/176] Fix typo in error message Closes #608 --- cmd/csaf_checker/reporters.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 157eabe..9cd3fc8 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -178,7 +178,7 @@ func (r *tlpAmberRedReporter) report(p *processor, domain *Domain) { return } if len(p.badAmberRedPermissions) == 0 { - req.message(InfoType, "All tested advisories labeled TLP:WHITE or TLP:RED were access-protected.") + req.message(InfoType, "All tested advisories labeled TLP:AMBER or TLP:RED were access-protected.") return } req.Messages = p.badAmberRedPermissions From 59d2cef0826080f9bf7bd60332c15ec614153834 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 11:53:57 +0100 Subject: [PATCH 089/176] Fix typos --- cmd/csaf_validator/main.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 346180b..3250388 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -107,7 +107,7 @@ func run(opts *options, files []string) error { log.Printf("error: loading %q as JSON failed: %v\n", file, err) continue } - // Validate agsinst Schema. + // Validate against Schema. validationErrs, err := csaf.ValidateCSAF(doc) if err != nil { log.Printf("error: validating %q against schema failed: %v\n", @@ -124,7 +124,7 @@ func run(opts *options, files []string) error { fmt.Printf("%q passes the schema validation.\n", file) } - // Check filename agains ID + // Check filename against ID if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil { log.Printf("%s: %s.\n", file, err) continue From 6e8c2ecc059090865dd6bc48bc4ff0371757c8ee Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 12:22:11 +0100 Subject: [PATCH 090/176] Check remote validator even if file validation fails This makes it consistent with the handling of schema validation. 
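
For reference, the exit-code convention touched by patches 081 and 084 above (documented in docs/csaf_validator.md: `n & 1` general error, `n & 2` schema validation failed, `n & 4` no remote validator configured, `n & 8` remote validation failed) can be decoded by a caller roughly as follows. This is an illustrative sketch only, not part of this patch series; the wrapper program and its behaviour are assumptions, and it simply shells out to the csaf_validator binary.

```go
// Minimal sketch: run csaf_validator and decode its bitmask exit code.
package main

import (
	"errors"
	"fmt"
	"log"
	"os"
	"os/exec"
)

func main() {
	cmd := exec.Command("csaf_validator", os.Args[1:]...)
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr

	code := 0
	if err := cmd.Run(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			code = exitErr.ExitCode()
		} else {
			log.Fatalf("running csaf_validator failed: %v", err)
		}
	}

	if code == 0 {
		fmt.Println("all documents valid")
	}
	if code&1 != 0 {
		fmt.Println("general error occurred, see logs")
	}
	if code&2 != 0 {
		fmt.Println("schema validation failed")
	}
	if code&4 != 0 {
		fmt.Println("no remote validator configured")
	}
	if code&8 != 0 {
		fmt.Println("failure in remote validation")
	}
	os.Exit(code)
}
```

Because the flags are independent bits, several can be set at once; for example, an exit code of 10 would indicate that both schema validation (bit 2) and remote validation (bit 8) failed.
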
--- cmd/csaf_validator/main.go | 1 - 1 file changed, 1 deletion(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 3250388..b3a0855 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -127,7 +127,6 @@ func run(opts *options, files []string) error { // Check filename against ID if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil { log.Printf("%s: %s.\n", file, err) - continue } // Validate against remote validator. From 84026b682d80e1edcc3ca8a8346c69a7e8e56059 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 28 Jan 2025 17:41:54 +0100 Subject: [PATCH 091/176] Update README.md to exchange csaf.io until it is fixed --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 463b1d9..8f0c5f3 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ # csaf -Implements a [CSAF](https://csaf.io/) +Implements a [CSAF](https://oasis-open.github.io/csaf-documentation/) ([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. From 7d74543bbbf7cc3f5051f6fef3a84c97347d5eba Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:02:18 +0100 Subject: [PATCH 092/176] Fix: Now give errors if lookup methods fail, refactor accordingly --- cmd/csaf_checker/processor.go | 72 ++++++++++++++++------------------- 1 file changed, 33 insertions(+), 39 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 5d1b69b..e07f5ad 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1340,49 +1340,57 @@ func (p *processor) checkSecurityFolder(folder string) string { // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available // and serves the "provider-metadata.json". // It returns an empty string if all checks are passed, otherwise the errormessage. -func (p *processor) checkDNS(domain string) string { +func (p *processor) checkDNS(domain string) { + + p.badDNSPath.use() client := p.httpClient() path := "https://csaf.data.security." + domain res, err := client.Get(path) if err != nil { - return fmt.Sprintf("Fetching %s failed: %v", path, err) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Fetching %s failed: %v", path, err)) + return } if res.StatusCode != http.StatusOK { - return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status) - + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status)) } hash := sha256.New() defer res.Body.Close() content, err := io.ReadAll(res.Body) if err != nil { - return fmt.Sprintf("Error while reading the response from %s", path) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Error while reading the response from %s", path)) } hash.Write(content) if !bytes.Equal(hash.Sum(nil), p.pmd256) { - return fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", path) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", + path)) } - return "" } -// checkWellknownMetadataReporter checks if the provider-metadata.json file is +// checkWellknown checks if the provider-metadata.json file is // available under the /.well-known/csaf/ directory. 
Returns the errormessage if // an error was encountered, or an empty string otherwise -func (p *processor) checkWellknown(domain string) string { +func (p *processor) checkWellknown(domain string) { + + p.badWellknownMetadata.use() client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" res, err := client.Get(path) if err != nil { - return fmt.Sprintf("Fetching %s failed: %v", path, err) + p.badWellknownMetadata.add(ErrorType, + fmt.Sprintf("Fetching %s failed: %v", path, err)) } if res.StatusCode != http.StatusOK { - return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status) + p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status)) } - return "" } // checkWellknownSecurityDNS @@ -1401,50 +1409,36 @@ func (p *processor) checkWellknown(domain string) string { // In that case, errors are returned. func (p *processor) checkWellknownSecurityDNS(domain string) error { - warningsW := p.checkWellknown(domain) + p.checkWellknown(domain) + p.checkDNS(domain) + // Security check for well known (default) and legacy location - warningsS, sDMessage := p.checkSecurity(domain, false) + warnings, sDMessage := p.checkSecurity(domain, false) // if the security.txt under .well-known was not okay // check for a security.txt within its legacy location sLMessage := "" - if warningsS == 1 { - warningsS, sLMessage = p.checkSecurity(domain, true) + if warnings == 1 { + warnings, sLMessage = p.checkSecurity(domain, true) } - warningsD := p.checkDNS(domain) - p.badWellknownMetadata.use() p.badSecurity.use() - p.badDNSPath.use() - - var kind MessageType - if warningsS != 1 || warningsD == "" || warningsW == "" { - kind = WarnType - } else { - kind = ErrorType - } // Info, Warning or Error depending on kind and warningS - kindSD := kind - if warningsS == 0 { + kindSD := WarnType + if warnings == 0 { kindSD = InfoType } - kindSL := kind - if warningsS == 2 { + kindSL := ErrorType + if warnings == 2 { kindSL = InfoType } - if warningsW != "" { - p.badWellknownMetadata.add(kind, warningsW) - } p.badSecurity.add(kindSD, sDMessage) // only if the well-known security.txt was not successful: // report about the legacy location - if warningsS != 0 { + if warnings != 0 { p.badSecurity.add(kindSL, sLMessage) } - if warningsD != "" { - p.badDNSPath.add(kind, warningsD) - } return nil } From 02787b24b799113b769b9ce3bfaeeb66b435340e Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:26:59 +0100 Subject: [PATCH 093/176] Update comments, clean up security check --- cmd/csaf_checker/processor.go | 44 +++++++++++++++++------------------ 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e07f5ad..cb38bda 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1339,7 +1339,6 @@ func (p *processor) checkSecurityFolder(folder string) string { // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available // and serves the "provider-metadata.json". -// It returns an empty string if all checks are passed, otherwise the errormessage. func (p *processor) checkDNS(domain string) { p.badDNSPath.use() @@ -1373,8 +1372,7 @@ func (p *processor) checkDNS(domain string) { } // checkWellknown checks if the provider-metadata.json file is -// available under the /.well-known/csaf/ directory. 
Returns the errormessage if -// an error was encountered, or an empty string otherwise +// available under the /.well-known/csaf/ directory. func (p *processor) checkWellknown(domain string) { p.badWellknownMetadata.use() @@ -1402,15 +1400,13 @@ func (p *processor) checkWellknown(domain string) { // 4. Finally it checks if the "csaf.data.security.domain.tld" DNS record // is available and serves the "provider-metadata.json". // -// / -// If all three checks fail, errors are given, -// otherwise warnings for all failed checks. -// The function returns nil, unless errors outside the checks were found. -// In that case, errors are returned. +// For the security.txt checks, it first checks the default location. +// Should this lookup fail, a warning is will be given and a lookup +// for the legacy location will be made. If this fails as well, then an +// error is given. func (p *processor) checkWellknownSecurityDNS(domain string) error { p.checkWellknown(domain) - p.checkDNS(domain) // Security check for well known (default) and legacy location warnings, sDMessage := p.checkSecurity(domain, false) @@ -1423,22 +1419,24 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badSecurity.use() - // Info, Warning or Error depending on kind and warningS - kindSD := WarnType - if warnings == 0 { - kindSD = InfoType - } - kindSL := ErrorType - if warnings == 2 { - kindSL = InfoType + // Report about Securitytxt: + // Only report about Legacy if default was succesful (0). + // Report default and legacy as errors if neither was succesful (1). + // Warn about missing security in the default position if not found + // but found in the legacy location, and inform about finding it there (2). + switch warnings { + case 0: + p.badSecurity.add(InfoType, sDMessage) + case 1: + p.badSecurity.add(ErrorType, sDMessage) + p.badSecurity.add(ErrorType, sLMessage) + case 2: + p.badSecurity.add(WarnType, sDMessage) + p.badSecurity.add(InfoType, sLMessage) } - p.badSecurity.add(kindSD, sDMessage) - // only if the well-known security.txt was not successful: - // report about the legacy location - if warnings != 0 { - p.badSecurity.add(kindSL, sLMessage) - } + p.checkDNS(domain) + return nil } From 82a6929e4dd9aea3743cb905e415665825f0dc89 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:41:16 +0100 Subject: [PATCH 094/176] Fix: Poor phrasing corrected --- cmd/csaf_checker/processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index cb38bda..d6f0f6b 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1420,7 +1420,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badSecurity.use() // Report about Securitytxt: - // Only report about Legacy if default was succesful (0). + // Only report about default location if it was succesful (0). // Report default and legacy as errors if neither was succesful (1). // Warn about missing security in the default position if not found // but found in the legacy location, and inform about finding it there (2). From 6e02de974e537ace9cd08179225a715674f8f096 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 25 Feb 2025 15:03:38 +0100 Subject: [PATCH 095/176] update release workflow dependencies and so glibc * Update runner to ubuntu-22.04 which is the eldest to be supported by github from 2025-04-01. * Update github actions and go version needed. 
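
Stepping back to the security.txt handling reworked in patches 092–094 above: the checker probes the default (well-known) location first and only falls back to the legacy top-level location if that fails. The following is a minimal, illustrative sketch of that probing order under the standard security.txt locations; the probe helper and the example domain are assumptions, and the real logic lives in cmd/csaf_checker/processor.go (checkSecurity / checkWellknownSecurityDNS).

```go
// Minimal sketch: probe the default and legacy security.txt locations.
package main

import (
	"fmt"
	"net/http"
)

// probe reports whether the given URL answers with HTTP 200.
func probe(client *http.Client, url string) bool {
	res, err := client.Get(url)
	if err != nil {
		return false
	}
	defer res.Body.Close()
	return res.StatusCode == http.StatusOK
}

func main() {
	domain := "example.com" // placeholder domain
	client := &http.Client{}

	wellKnown := "https://" + domain + "/.well-known/security.txt"
	legacy := "https://" + domain + "/security.txt"

	switch {
	case probe(client, wellKnown):
		fmt.Println("security.txt found at the default (well-known) location")
	case probe(client, legacy):
		fmt.Println("security.txt only found at the legacy location")
	default:
		fmt.Println("no security.txt found at either location")
	}
}
```

This mirrors the reporting order introduced in patch 093: success at the default location is reported as info, a hit only at the legacy location downgrades the default-location finding to a warning, and a miss at both locations is reported as an error.
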
--- .github/workflows/release.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d1e370f..f77c9e3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,23 +7,23 @@ on: jobs: releases-matrix: name: Release Go binaries - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: '^1.21.0' + go-version: '^1.23.6' - name: Build run: make dist - name: Upload release assets - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: files: | dist/csaf-*.zip From a4a90f4f9274b295c27bfb6df255e6b2a5134f45 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 25 Feb 2025 15:07:34 +0100 Subject: [PATCH 096/176] update go version to 1.23 --- .github/workflows/itest.yml | 6 +++--- README.md | 2 +- docs/Development.md | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 9cc4c6b..b537b39 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -7,9 +7,9 @@ jobs: steps: - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: 1.22.0 + go-version: '^1.23.6' - name: Set up Node.js uses: actions/setup-node@v3 @@ -17,7 +17,7 @@ jobs: node-version: 16 - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Execute the scripts run: | diff --git a/README.md b/README.md index 8f0c5f3..b76bf95 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/docs/Development.md b/docs/Development.md index 5c4df22..bc71c2c 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.22 and 1.23). +the latest version of Go (currently 1.23 and 1.24). ## Generated files From 3afa8d8b2e908cba70bddde5442240cab5ec9bb9 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 25 Feb 2025 15:24:24 +0100 Subject: [PATCH 097/176] Upgrade to artifact action v4 --- .github/workflows/itest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index b537b39..8bc87d5 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -36,7 +36,7 @@ jobs: shell: bash - name: Upload test results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: checker-results path: | From e91bdec201822e1e334582a5dde0388e92d74994 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 3 Mar 2025 17:31:21 +0100 Subject: [PATCH 098/176] Add example for iterating product id and product helper (#617) * Add example for iterating product id and product helper * simplify code a bit * Remove newline --------- Co-authored-by: Sascha L. 
Teichmann --- examples/product_lister/main.go | 141 ++++++++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100644 examples/product_lister/main.go diff --git a/examples/product_lister/main.go b/examples/product_lister/main.go new file mode 100644 index 0000000..5ad26a9 --- /dev/null +++ b/examples/product_lister/main.go @@ -0,0 +1,141 @@ +// Package main implements a simple demo program to +// work with the csaf library. +package main + +import ( + "encoding/json" + "flag" + "fmt" + "log" + "os" + + "github.com/gocsaf/csaf/v3/csaf" +) + +func main() { + flag.Usage = func() { + if _, err := fmt.Fprintf(flag.CommandLine.Output(), + "Usage:\n %s [OPTIONS] files...\n\nOptions:\n", os.Args[0]); err != nil { + log.Fatalf("error: %v\n", err) + } + flag.PrintDefaults() + } + printProductIdentHelper := flag.Bool("print_ident_helper", false, "print product helper mapping") + flag.Parse() + + files := flag.Args() + if len(files) == 0 { + log.Println("No files given.") + return + } + + var printer func(*csaf.Advisory) error + if *printProductIdentHelper { + printer = printProductIdentHelperMapping + } else { + printer = printProductIDMapping + } + + if err := run(files, printer); err != nil { + log.Fatalf("error: %v\n", err) + } +} + +// visitFullProductNames iterates all full product names in the advisory. +func visitFullProductNames( + adv *csaf.Advisory, + visit func(*csaf.FullProductName), +) { + // Iterate over all full product names + if fpns := adv.ProductTree.FullProductNames; fpns != nil { + for _, fpn := range *fpns { + if fpn != nil && fpn.ProductID != nil { + visit(fpn) + } + } + } + + // Iterate over branches recursively + var recBranch func(b *csaf.Branch) + recBranch = func(b *csaf.Branch) { + if b == nil { + return + } + if fpn := b.Product; fpn != nil && fpn.ProductID != nil { + visit(fpn) + + } + for _, c := range b.Branches { + recBranch(c) + } + } + for _, b := range adv.ProductTree.Branches { + recBranch(b) + } + + // Iterate over relationships + if rels := adv.ProductTree.RelationShips; rels != nil { + for _, rel := range *rels { + if rel != nil { + if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil { + visit(fpn) + } + } + } + } +} + +// run applies fn to all loaded advisories. +func run(files []string, fn func(*csaf.Advisory) error) error { + for _, file := range files { + adv, err := csaf.LoadAdvisory(file) + if err != nil { + return fmt.Errorf("loading %q failed: %w", file, err) + } + if err := fn(adv); err != nil { + return err + } + } + return nil +} + +// printJSON serializes v as indented JSON to stdout. +func printJSON(v any) error { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(v) +} + +// printProductIDMapping prints all product ids with their name and identification helper. +func printProductIDMapping(adv *csaf.Advisory) error { + type productNameHelperMapping struct { + FullProductName *csaf.FullProductName `json:"product"` + ProductIdentificationHelper *csaf.ProductIdentificationHelper `json:"product_identification_helper"` + } + + productIDMap := map[csaf.ProductID][]productNameHelperMapping{} + visitFullProductNames(adv, func(fpn *csaf.FullProductName) { + productIDMap[*fpn.ProductID] = append(productIDMap[*fpn.ProductID], productNameHelperMapping{ + FullProductName: fpn, + ProductIdentificationHelper: fpn.ProductIdentificationHelper, + }) + }) + return printJSON(productIDMap) +} + +// printProductIdentHelperMapping prints all product identifier helper with their product id. 
+func printProductIdentHelperMapping(adv *csaf.Advisory) error { + type productIdentIDMapping struct { + ProductNameHelperMapping csaf.ProductIdentificationHelper `json:"product_identification_helper"` + ProductID *csaf.ProductID `json:"product_id"` + } + + productIdentMap := []productIdentIDMapping{} + visitFullProductNames(adv, func(fpn *csaf.FullProductName) { + productIdentMap = append(productIdentMap, productIdentIDMapping{ + ProductNameHelperMapping: *fpn.ProductIdentificationHelper, + ProductID: fpn.ProductID, + }) + }) + return printJSON(productIdentMap) +} From 24f9af7f26bf558ec92dedc86317a1267b169896 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 5 Mar 2025 09:55:11 +0100 Subject: [PATCH 099/176] Add documentation for externally signed documents Closes #607 --- docs/csaf_uploader.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/csaf_uploader.md b/docs/csaf_uploader.md index 0e68aa9..76af99f 100644 --- a/docs/csaf_uploader.md +++ b/docs/csaf_uploader.md @@ -43,6 +43,12 @@ E.g. uploading a csaf-document which asks to enter a password interactively. +To upload an already signed document, use the `-x` option +```bash +# Note: The file CSAF-document-1.json.asc must exist +./csaf_uploader -x -a upload -I -t white -u https://localhost/cgi-bin/csaf_provider.go CSAF-document-1.json +``` + By default csaf_uploader will try to load a config file from the following places: From ec0c3f9c2ca9a9080f876944ddac5f0a583b5b11 Mon Sep 17 00:00:00 2001 From: Marcus Perlick <38723273+marcusperlick@users.noreply.github.com> Date: Mon, 10 Mar 2025 09:24:49 +0100 Subject: [PATCH 100/176] Fix potential leak of HTTP response body in downloadJSON of csaf_aggregator (#618) --- cmd/csaf_aggregator/client.go | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index 916baa5..abd475c 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -10,6 +10,7 @@ package main import ( "errors" + "fmt" "io" "net/http" @@ -20,13 +21,14 @@ var errNotFound = errors.New("not found") func downloadJSON(c util.Client, url string, found func(io.Reader) error) error { res, err := c.Get(url) - if err != nil || res.StatusCode != http.StatusOK || + if err != nil { + return fmt.Errorf("not found: %w", err) + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK || res.Header.Get("Content-Type") != "application/json" { // ignore this as it is expected. return errNotFound } - return func() error { - defer res.Body.Close() - return found(res.Body) - }() + return found(res.Body) } From 3cfafa8263112d79d489dbc170004fcf3498340b Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 10 Mar 2025 11:11:34 +0100 Subject: [PATCH 101/176] Report error in checker if content type is not correct Related: #606 --- cmd/csaf_checker/processor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index c0aafb2..397c88e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -678,9 +678,9 @@ func (p *processor) integrity( continue } - // Warn if we do not get JSON. + // Error if we do not get JSON. 
if ct := res.Header.Get("Content-Type"); ct != "application/json" { - lg(WarnType, + lg(ErrorType, "The content type of %s should be 'application/json' but is '%s'", u, ct) } From 534d6f049f9ed5cf54c75c8a2ede3a23511868f4 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 10 Mar 2025 12:02:44 +0100 Subject: [PATCH 102/176] Add content-type error report test --- cmd/csaf_checker/processor_test.go | 53 ++++++++++++++++++++++++++++++ internal/testutil/testutil.go | 18 ++++++---- 2 files changed, 65 insertions(+), 6 deletions(-) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 0710f32..4d13908 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -14,6 +14,8 @@ import ( "net/http/httptest" "os" "reflect" + "slices" + "strings" "testing" "text/template" @@ -65,6 +67,57 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct return requirement } +func TestContentTypeReport(t *testing.T) { + serverURL := "" + params := testutil.ProviderParams{ + URL: "", + EnableSha256: true, + EnableSha512: true, + ForbidSha256: true, + ForbidSha512: true, + JSONContentType: "application/json; charset=utf-8", + } + server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, false)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + cfg := config{} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + p, err := newProcessor(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + p.client = client + + report, err := p.run([]string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("Content-Type-Report: Expected no error, got: %v", err) + } + + got := report.Domains[0].Requirements + idx := slices.IndexFunc(got, func(e *Requirement) bool { + return e.Num == 7 + }) + if idx == -1 { + t.Error("Content-Type-Report: Could not find requirement") + } else { + message := got[idx].Messages[0] + if message.Type != ErrorType || !strings.Contains(message.Text, "should be 'application/json'") { + t.Errorf("Content-Type-Report: Content Type Error, got %v", message) + } + } + + p.close() +} + func TestShaMarking(t *testing.T) { tests := []struct { name string diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index c7bad68..a8186a4 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,11 +18,12 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool - ForbidSha256 bool - ForbidSha512 bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidSha256 bool + ForbidSha512 bool + JSONContentType string } // ProviderHandler returns a test provider handler with the specified configuration. 
@@ -35,6 +36,11 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle path += "simple-rolie-provider" } + jsonContenType := "application/json" + if params.JSONContentType != "" { + jsonContenType = params.JSONContentType + } + path += r.URL.Path if strings.HasSuffix(r.URL.Path, "/") { @@ -50,7 +56,7 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle case strings.HasSuffix(path, ".html"): w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): - w.Header().Add("Content-Type", "application/json") + w.Header().Add("Content-Type", jsonContenType) case (strings.HasSuffix(path, ".sha256")) && params.ForbidSha256: w.WriteHeader(http.StatusForbidden) return From 4429dd69857d59fe0ef2c6ca5a6974ac76062e50 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 13 Mar 2025 18:23:28 +0100 Subject: [PATCH 103/176] feat: add access-control-allow-origin header .. for better access from web applications. improve #479 --- docs/scripts/DNSConfigForItest.sh | 2 ++ docs/scripts/setupProviderForITest.sh | 11 +++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/scripts/DNSConfigForItest.sh b/docs/scripts/DNSConfigForItest.sh index f7b85f0..9196af3 100755 --- a/docs/scripts/DNSConfigForItest.sh +++ b/docs/scripts/DNSConfigForItest.sh @@ -28,6 +28,8 @@ echo " location = / { try_files /.well-known/csaf/provider-metadata.json =404; + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; } access_log /var/log/nginx/dns-domain_access.log; diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index f9d7d18..2b6e6d1 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -56,11 +56,14 @@ location /cgi-bin/ { sudo sed -i "/^server {/a\ include fcgiwrap.conf;" $NGINX_CONFIG_PATH echo " - # For atomic directory switches - disable_symlinks off; + # For atomic directory switches + disable_symlinks off; - # directory listings - autoindex on; + # directory listings + autoindex on; + + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; " > locationConfig.txt sudo sed -i "/^\s*location \/ {/r locationConfig.txt" $NGINX_CONFIG_PATH # Insert config inside location{} ./DNSConfigForItest.sh From 527fe71992797095f99e95c02f69711dc629e03d Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 13 Mar 2025 18:30:38 +0100 Subject: [PATCH 104/176] feat: set acao header * adapt provider-setup.md to changes for the acao header. --- docs/provider-setup.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 48c29d0..2fdf1e3 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -78,6 +78,9 @@ server { # directory listings autoindex on; + + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; } # enable CGI @@ -155,7 +158,7 @@ Again replacing `{clientCert.crt}` and `{clientKey.pem}` accordingly. 
To let nginx resolves the DNS record `csaf.data.security.domain.tld` to fulfill the [Requirement 10](https://docs.oasis-open.org/csaf/csaf/v2.0/cs01/csaf-v2.0-cs01.html#7110-requirement-10-dns-path) configure a new server block (virtual host) in a separated file under `/etc/nginx/available-sites/{DNSNAME}` like following: - + ```sh server { From 8163f578511f417a0c1b9b4b58de8574b7916736 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Fri, 14 Mar 2025 10:05:56 +0100 Subject: [PATCH 105/176] Compare changes dates (#609) * Feat: Compare dates in changes.csv to those within the files if existent * Fix: remove debug output and fix typo * Make map handling consistent * Improve: refactor time extraction * fix: some syntax fixes * Small nits * Fix: Check changes before stopping the scan of already tested advisories * Revert "Fix: Check changes before stopping the scan of already tested advisories - bad way to solve the problem, can cause problems" This reverts commit d38dc285cc8e664dc97f81418b2b52174e83e68b. * fix: delay checking of changes dates so it is not skipped most of the time * Fix time comparison --------- Co-authored-by: koplas Co-authored-by: Sascha L. Teichmann --- cmd/csaf_checker/processor.go | 85 ++++++++++++++++++++++++++--------- 1 file changed, 65 insertions(+), 20 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 397c88e..ae79133 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -53,6 +53,8 @@ type processor struct { pmd any keys *crypto.KeyRing labelChecker labelChecker + timesChanges map[string]time.Time + timesAdv map[string]time.Time invalidAdvisories topicMessages badFilenames topicMessages @@ -188,6 +190,9 @@ func newProcessor(cfg *config) (*processor, error) { advisories: map[csaf.TLPLabel]util.Set[string]{}, whiteAdvisories: map[identifier]bool{}, }, + timesAdv: map[string]time.Time{}, + timesChanges: map[string]time.Time{}, + noneTLS: util.Set[string]{}, }, nil } @@ -202,14 +207,14 @@ func (p *processor) close() { // reset clears the fields values of the given processor. func (p *processor) reset() { p.redirects = nil - p.noneTLS = nil - for k := range p.alreadyChecked { - delete(p.alreadyChecked, k) - } p.pmdURL = "" p.pmd256 = nil p.pmd = nil p.keys = nil + clear(p.alreadyChecked) + clear(p.noneTLS) + clear(p.timesAdv) + clear(p.timesChanges) p.invalidAdvisories.reset() p.badFilenames.reset() @@ -371,9 +376,6 @@ func (p *processor) checkDomain(domain string) error { // checkTLS parses the given URL to check its schema, as a result it sets // the value of "noneTLS" field if it is not HTTPS. func (p *processor) checkTLS(u string) { - if p.noneTLS == nil { - p.noneTLS = util.Set[string]{} - } if x, err := url.Parse(u); err == nil && x.Scheme != "https" { p.noneTLS.Add(u) } @@ -617,6 +619,8 @@ func makeAbsolute(base *url.URL) func(*url.URL) *url.URL { var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`) +// integrity checks several csaf.AdvisoryFiles for formal +// mistakes, from conforming filenames to invalid advisories. func (p *processor) integrity( files []csaf.AdvisoryFile, base string, @@ -732,19 +736,19 @@ func (p *processor) integrity( // Check if file is in the right folder. 
p.badFolders.use() - if date, err := p.expr.Eval( - `$.document.tracking.initial_release_date`, doc); err != nil { - p.badFolders.error( - "Extracting 'initial_release_date' from %s failed: %v", u, err) - } else if text, ok := date.(string); !ok { - p.badFolders.error("'initial_release_date' is not a string in %s", u) - } else if d, err := time.Parse(time.RFC3339, text); err != nil { - p.badFolders.error( - "Parsing 'initial_release_date' as RFC3339 failed in %s: %v", u, err) - } else if folderYear == nil { + switch date, fault := p.extractTime(doc, `initial_release_date`, u); { + case fault != "": + p.badFolders.error(fault) + case folderYear == nil: p.badFolders.error("No year folder found in %s", u) - } else if d.UTC().Year() != *folderYear { - p.badFolders.error("%s should be in folder %d", u, d.UTC().Year()) + case date.UTC().Year() != *folderYear: + p.badFolders.error("%s should be in folder %d", u, date.UTC().Year()) + } + current, fault := p.extractTime(doc, `current_release_date`, u) + if fault != "" { + p.badChanges.error(fault) + } else { + p.timesAdv[f.URL()] = current } // Check hashes @@ -861,9 +865,48 @@ func (p *processor) integrity( } } + // If we tested an existing changes.csv + if len(p.timesAdv) > 0 && p.badChanges.used() { + // Iterate over all files again + for _, f := range files { + // If there was no previous error when extracting times from advisories and we have a valid time + if timeAdv, ok := p.timesAdv[f.URL()]; ok { + // If there was no previous error when extracting times from changes and the file was listed in changes.csv + if timeCha, ok := p.timesChanges[f.URL()]; ok { + // check if the time matches + if !timeAdv.Equal(timeCha) { + // if not, give an error and remove the pair so it isn't reported multiple times should integrity be called again + p.badChanges.error("Current release date in changes.csv and %s is not identical.", f.URL()) + delete(p.timesAdv, f.URL()) + delete(p.timesChanges, f.URL()) + } + } + } + } + } + return nil } +// extractTime extracts a time.Time value from a json document and returns it and an empty string or zero time alongside +// a string representing the error message that prevented obtaining the proper time value. +func (p *processor) extractTime(doc any, value string, u any) (time.Time, string) { + filter := "$.document.tracking." + value + date, err := p.expr.Eval(filter, doc) + if err != nil { + return time.Time{}, fmt.Sprintf("Extracting '%s' from %s failed: %v", value, u, err) + } + text, ok := date.(string) + if !ok { + return time.Time{}, fmt.Sprintf("'%s' is not a string in %s", value, u) + } + d, err := time.Parse(time.RFC3339, text) + if err != nil { + return time.Time{}, fmt.Sprintf("Parsing '%s' as RFC3339 failed in %s: %v", value, u, err) + } + return d, "" +} + // checkIndex fetches the "index.txt" and calls "checkTLS" method for HTTPS checks. // It extracts the file names from the file and passes them to "integrity" function. // It returns error if fetching/reading the file(s) fails, otherwise nil. 
@@ -991,8 +1034,10 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = append(times, t), + times, files = + append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) + p.timesChanges[path] = t } return times, files, nil }() From 17f6a3ac7eb7fac39825fb1ae8c25398d288fedc Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 14 Mar 2025 10:26:19 +0100 Subject: [PATCH 106/176] Fix inconsistent format --- docs/scripts/setupProviderForITest.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index 2b6e6d1..ae6c6fc 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -56,14 +56,14 @@ location /cgi-bin/ { sudo sed -i "/^server {/a\ include fcgiwrap.conf;" $NGINX_CONFIG_PATH echo " - # For atomic directory switches - disable_symlinks off; + # For atomic directory switches + disable_symlinks off; - # directory listings - autoindex on; + # directory listings + autoindex on; - # allow others web applications to get the static information - add_header Access-Control-Allow-Origin "*"; + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; " > locationConfig.txt sudo sed -i "/^\s*location \/ {/r locationConfig.txt" $NGINX_CONFIG_PATH # Insert config inside location{} ./DNSConfigForItest.sh From a7821265ca4dfc65ec3966d970047c322900e188 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 17 Mar 2025 08:57:05 +0100 Subject: [PATCH 107/176] Move advisory downloading to download context method --- cmd/csaf_downloader/downloader.go | 616 ++++++++++++++++-------------- 1 file changed, 319 insertions(+), 297 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3270a88..5af7f5e 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -417,6 +417,320 @@ func (d *downloader) logValidationIssues(url string, errors []string, err error) } } +// downloadContext stores the common context of a downloader. +type downloadContext struct { + d *downloader + client util.Client + data bytes.Buffer + lastDir string + initialReleaseDate time.Time + dateExtract func(any) error + lower string + stats stats + expr *util.PathEval +} + +func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext { + dc := &downloadContext{ + client: d.httpClient(), + lower: strings.ToLower(string(label)), + expr: util.NewPathEval(), + } + dc.dateExtract = util.TimeMatcher(&dc.initialReleaseDate, time.RFC3339) + return dc +} + +func (dc *downloadContext) downloadAdvisory( + file csaf.AdvisoryFile, + errorCh chan<- error, +) error { + u, err := url.Parse(file.URL()) + if err != nil { + dc.stats.downloadFailed++ + slog.Warn("Ignoring invalid URL", + "url", file.URL(), + "error", err) + return nil + } + + if dc.d.cfg.ignoreURL(file.URL()) { + slog.Debug("Ignoring URL", "url", file.URL()) + return nil + } + + // Ignore not conforming filenames. 
+ filename := filepath.Base(u.Path) + if !util.ConformingFileName(filename) { + dc.stats.filenameFailed++ + slog.Warn("Ignoring none conforming filename", + "filename", filename) + return nil + } + + resp, err := dc.client.Get(file.URL()) + if err != nil { + dc.stats.downloadFailed++ + slog.Warn("Cannot GET", + "url", file.URL(), + "error", err) + return nil + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + dc.stats.downloadFailed++ + slog.Warn("Cannot load", + "url", file.URL(), + "status", resp.Status, + "status_code", resp.StatusCode) + return nil + } + + // Warn if we do not get JSON. + if ct := resp.Header.Get("Content-Type"); ct != "application/json" { + slog.Warn("Content type is not 'application/json'", + "url", file.URL(), + "content_type", ct) + } + + var ( + writers []io.Writer + s256, s512 hash.Hash + s256Data, s512Data []byte + remoteSHA256, remoteSHA512 []byte + signData []byte + ) + + hashToFetch := []hashFetchInfo{} + if file.SHA512URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA512URL(), + warn: true, + hashType: algSha512, + preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha512)), + }) + } else { + slog.Info("SHA512 not present") + } + if file.SHA256URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA256URL(), + warn: true, + hashType: algSha256, + preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha256)), + }) + } else { + slog.Info("SHA256 not present") + } + if file.IsDirectory() { + for i := range hashToFetch { + hashToFetch[i].warn = false + } + } + + remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(dc.client, hashToFetch) + if remoteSHA512 != nil { + s512 = sha512.New() + writers = append(writers, s512) + } + if remoteSHA256 != nil { + s256 = sha256.New() + writers = append(writers, s256) + } + + // Remember the data as we need to store it to file later. + dc.data.Reset() + writers = append(writers, &dc.data) + + // Download the advisory and hash it. + hasher := io.MultiWriter(writers...) + + var doc any + + tee := io.TeeReader(resp.Body, hasher) + + if err := json.NewDecoder(tee).Decode(&doc); err != nil { + dc.stats.downloadFailed++ + slog.Warn("Downloading failed", + "url", file.URL(), + "error", err) + return nil + } + + // Compare the checksums. + s256Check := func() error { + if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) { + dc.stats.sha256Failed++ + return fmt.Errorf("SHA256 checksum of %s does not match", file.URL()) + } + return nil + } + + s512Check := func() error { + if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) { + dc.stats.sha512Failed++ + return fmt.Errorf("SHA512 checksum of %s does not match", file.URL()) + } + return nil + } + + // Validate OpenPGP signature. + keysCheck := func() error { + // Only check signature if we have loaded keys. + if dc.d.keys == nil { + return nil + } + var sign *crypto.PGPSignature + sign, signData, err = loadSignature(dc.client, file.SignURL()) + if err != nil { + slog.Warn("Downloading signature failed", + "url", file.SignURL(), + "error", err) + } + if sign != nil { + if err := dc.d.checkSignature(dc.data.Bytes(), sign); err != nil { + if !dc.d.cfg.IgnoreSignatureCheck { + dc.stats.signatureFailed++ + return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err) + } + } + } + return nil + } + + // Validate against CSAF schema. 
+ schemaCheck := func() error { + if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 { + dc.stats.schemaFailed++ + dc.d.logValidationIssues(file.URL(), errors, err) + return fmt.Errorf("schema validation for %q failed", file.URL()) + } + return nil + } + + // Validate if filename is conforming. + filenameCheck := func() error { + if err := util.IDMatchesFilename(dc.expr, doc, filename); err != nil { + dc.stats.filenameFailed++ + return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) + } + return nil + } + + // Validate against remote validator. + remoteValidatorCheck := func() error { + if dc.d.validator == nil { + return nil + } + rvr, err := dc.d.validator.Validate(doc) + if err != nil { + errorCh <- fmt.Errorf( + "calling remote validator on %q failed: %w", + file.URL(), err) + return nil + } + if !rvr.Valid { + dc.stats.remoteFailed++ + return fmt.Errorf("remote validation of %q failed", file.URL()) + } + return nil + } + + // Run all the validations. + valStatus := notValidatedValidationStatus + for _, check := range []func() error{ + s256Check, + s512Check, + keysCheck, + schemaCheck, + filenameCheck, + remoteValidatorCheck, + } { + if err := check(); err != nil { + slog.Error("Validation check failed", "error", err) + valStatus.update(invalidValidationStatus) + if dc.d.cfg.ValidationMode == validationStrict { + return nil + } + } + } + valStatus.update(validValidationStatus) + + // Send to forwarder + if dc.d.forwarder != nil { + dc.d.forwarder.forward( + filename, dc.data.String(), + valStatus, + string(s256Data), + string(s512Data)) + } + + if dc.d.cfg.NoStore { + // Do not write locally. + if valStatus == validValidationStatus { + dc.stats.succeeded++ + } + return nil + } + + if err := dc.expr.Extract( + `$.document.tracking.initial_release_date`, dc.dateExtract, false, doc, + ); err != nil { + slog.Warn("Cannot extract initial_release_date from advisory", + "url", file.URL()) + dc.initialReleaseDate = time.Now() + } + dc.initialReleaseDate = dc.initialReleaseDate.UTC() + + // Advisories that failed validation are stored in a special folder. + var newDir string + if valStatus != validValidationStatus { + newDir = path.Join(dc.d.cfg.Directory, failedValidationDir) + } else { + newDir = dc.d.cfg.Directory + } + + // Do we have a configured destination folder? + if dc.d.cfg.Folder != "" { + newDir = path.Join(newDir, dc.d.cfg.Folder) + } else { + newDir = path.Join(newDir, dc.lower, strconv.Itoa(dc.initialReleaseDate.Year())) + } + + if newDir != dc.lastDir { + if err := dc.d.mkdirAll(newDir, 0755); err != nil { + errorCh <- err + return nil + } + dc.lastDir = newDir + } + + // Write advisory to file + path := filepath.Join(dc.lastDir, filename) + + // Write data to disk. 
+ for _, x := range []struct { + p string + d []byte + }{ + {path, dc.data.Bytes()}, + {path + ".sha256", s256Data}, + {path + ".sha512", s512Data}, + {path + ".asc", signData}, + } { + if x.d != nil { + if err := os.WriteFile(x.p, x.d, 0644); err != nil { + errorCh <- err + return nil + } + } + } + + dc.stats.succeeded++ + slog.Info("Written advisory", "path", path) + return nil +} + func (d *downloader) downloadWorker( ctx context.Context, wg *sync.WaitGroup, @@ -426,21 +740,11 @@ func (d *downloader) downloadWorker( ) { defer wg.Done() - var ( - client = d.httpClient() - data bytes.Buffer - lastDir string - initialReleaseDate time.Time - dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339) - lower = strings.ToLower(string(label)) - stats = stats{} - expr = util.NewPathEval() - ) + dc := newDownloadContext(d, label) // Add collected stats back to total. - defer d.addStats(&stats) + defer d.addStats(&dc.stats) -nextAdvisory: for { var file csaf.AdvisoryFile var ok bool @@ -452,292 +756,10 @@ nextAdvisory: case <-ctx.Done(): return } - - u, err := url.Parse(file.URL()) - if err != nil { - stats.downloadFailed++ - slog.Warn("Ignoring invalid URL", - "url", file.URL(), - "error", err) - continue + if err := dc.downloadAdvisory(file, errorCh); err != nil { + slog.Error("download terminated", "error", err) + return } - - if d.cfg.ignoreURL(file.URL()) { - slog.Debug("Ignoring URL", "url", file.URL()) - continue - } - - // Ignore not conforming filenames. - filename := filepath.Base(u.Path) - if !util.ConformingFileName(filename) { - stats.filenameFailed++ - slog.Warn("Ignoring none conforming filename", - "filename", filename) - continue - } - - resp, err := client.Get(file.URL()) - if err != nil { - stats.downloadFailed++ - slog.Warn("Cannot GET", - "url", file.URL(), - "error", err) - continue - } - - if resp.StatusCode != http.StatusOK { - stats.downloadFailed++ - slog.Warn("Cannot load", - "url", file.URL(), - "status", resp.Status, - "status_code", resp.StatusCode) - continue - } - - // Warn if we do not get JSON. - if ct := resp.Header.Get("Content-Type"); ct != "application/json" { - slog.Warn("Content type is not 'application/json'", - "url", file.URL(), - "content_type", ct) - } - - var ( - writers []io.Writer - s256, s512 hash.Hash - s256Data, s512Data []byte - remoteSHA256, remoteSHA512 []byte - signData []byte - ) - - hashToFetch := []hashFetchInfo{} - if file.SHA512URL() != "" { - hashToFetch = append(hashToFetch, hashFetchInfo{ - url: file.SHA512URL(), - warn: true, - hashType: algSha512, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)), - }) - } else { - slog.Info("SHA512 not present") - } - if file.SHA256URL() != "" { - hashToFetch = append(hashToFetch, hashFetchInfo{ - url: file.SHA256URL(), - warn: true, - hashType: algSha256, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)), - }) - } else { - slog.Info("SHA256 not present") - } - if file.IsDirectory() { - for i := range hashToFetch { - hashToFetch[i].warn = false - } - } - - remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch) - if remoteSHA512 != nil { - s512 = sha512.New() - writers = append(writers, s512) - } - if remoteSHA256 != nil { - s256 = sha256.New() - writers = append(writers, s256) - } - - // Remember the data as we need to store it to file later. - data.Reset() - writers = append(writers, &data) - - // Download the advisory and hash it. - hasher := io.MultiWriter(writers...) 
- - var doc any - - if err := func() error { - defer resp.Body.Close() - tee := io.TeeReader(resp.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) - }(); err != nil { - stats.downloadFailed++ - slog.Warn("Downloading failed", - "url", file.URL(), - "error", err) - continue - } - - // Compare the checksums. - s256Check := func() error { - if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) { - stats.sha256Failed++ - return fmt.Errorf("SHA256 checksum of %s does not match", file.URL()) - } - return nil - } - - s512Check := func() error { - if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) { - stats.sha512Failed++ - return fmt.Errorf("SHA512 checksum of %s does not match", file.URL()) - } - return nil - } - - // Validate OpenPGP signature. - keysCheck := func() error { - // Only check signature if we have loaded keys. - if d.keys == nil { - return nil - } - var sign *crypto.PGPSignature - sign, signData, err = loadSignature(client, file.SignURL()) - if err != nil { - slog.Warn("Downloading signature failed", - "url", file.SignURL(), - "error", err) - } - if sign != nil { - if err := d.checkSignature(data.Bytes(), sign); err != nil { - if !d.cfg.IgnoreSignatureCheck { - stats.signatureFailed++ - return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err) - } - } - } - return nil - } - - // Validate against CSAF schema. - schemaCheck := func() error { - if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 { - stats.schemaFailed++ - d.logValidationIssues(file.URL(), errors, err) - return fmt.Errorf("schema validation for %q failed", file.URL()) - } - return nil - } - - // Validate if filename is conforming. - filenameCheck := func() error { - if err := util.IDMatchesFilename(expr, doc, filename); err != nil { - stats.filenameFailed++ - return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) - } - return nil - } - - // Validate against remote validator. - remoteValidatorCheck := func() error { - if d.validator == nil { - return nil - } - rvr, err := d.validator.Validate(doc) - if err != nil { - errorCh <- fmt.Errorf( - "calling remote validator on %q failed: %w", - file.URL(), err) - return nil - } - if !rvr.Valid { - stats.remoteFailed++ - return fmt.Errorf("remote validation of %q failed", file.URL()) - } - return nil - } - - // Run all the validations. - valStatus := notValidatedValidationStatus - for _, check := range []func() error{ - s256Check, - s512Check, - keysCheck, - schemaCheck, - filenameCheck, - remoteValidatorCheck, - } { - if err := check(); err != nil { - slog.Error("Validation check failed", "error", err) - valStatus.update(invalidValidationStatus) - if d.cfg.ValidationMode == validationStrict { - continue nextAdvisory - } - } - } - valStatus.update(validValidationStatus) - - // Send to forwarder - if d.forwarder != nil { - d.forwarder.forward( - filename, data.String(), - valStatus, - string(s256Data), - string(s512Data)) - } - - if d.cfg.NoStore { - // Do not write locally. - if valStatus == validValidationStatus { - stats.succeeded++ - } - continue - } - - if err := expr.Extract( - `$.document.tracking.initial_release_date`, dateExtract, false, doc, - ); err != nil { - slog.Warn("Cannot extract initial_release_date from advisory", - "url", file.URL()) - initialReleaseDate = time.Now() - } - initialReleaseDate = initialReleaseDate.UTC() - - // Advisories that failed validation are stored in a special folder. 
- var newDir string - if valStatus != validValidationStatus { - newDir = path.Join(d.cfg.Directory, failedValidationDir) - } else { - newDir = d.cfg.Directory - } - - // Do we have a configured destination folder? - if d.cfg.Folder != "" { - newDir = path.Join(newDir, d.cfg.Folder) - } else { - newDir = path.Join(newDir, lower, strconv.Itoa(initialReleaseDate.Year())) - } - - if newDir != lastDir { - if err := d.mkdirAll(newDir, 0755); err != nil { - errorCh <- err - continue - } - lastDir = newDir - } - - // Write advisory to file - path := filepath.Join(lastDir, filename) - - // Write data to disk. - for _, x := range []struct { - p string - d []byte - }{ - {path, data.Bytes()}, - {path + ".sha256", s256Data}, - {path + ".sha512", s512Data}, - {path + ".asc", signData}, - } { - if x.d != nil { - if err := os.WriteFile(x.p, x.d, 0644); err != nil { - errorCh <- err - continue nextAdvisory - } - } - } - - stats.succeeded++ - slog.Info("Written advisory", "path", path) } } From 5437d8127a8245ea5da2d7162c63c844e16156e9 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 17 Mar 2025 09:10:03 +0100 Subject: [PATCH 108/176] Store downloader in context --- cmd/csaf_downloader/downloader.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 5af7f5e..f0778ee 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -432,6 +432,7 @@ type downloadContext struct { func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext { dc := &downloadContext{ + d: d, client: d.httpClient(), lower: strings.ToLower(string(label)), expr: util.NewPathEval(), From 5709b14650682d1d9e5614ba586d3dc96a0aa27a Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 19 Mar 2025 09:04:19 +0100 Subject: [PATCH 109/176] Extend structured logging usage in aggregator (#622) * Extend structured logging usage in aggregator * Use structured logging in advisories processor * Remove unnecessary inner function * Format * Feat: Add verbose flag to example aggregator toml (in comment) --------- Co-authored-by: JanHoefelmeyer --- cmd/csaf_aggregator/config.go | 15 ++++++++---- cmd/csaf_aggregator/mirror.go | 7 +++--- csaf/advisories.go | 43 +++++++++++++++++------------------ docs/examples/aggregator.toml | 1 + 4 files changed, 36 insertions(+), 30 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 3c2c46b..55a7193 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -264,8 +264,14 @@ func (c *config) privateOpenPGPKey() (*crypto.Key, error) { return c.key, c.keyErr } -func (c *config) httpClient(p *provider) util.Client { +// httpLog does structured logging in a [util.LoggingClient]. 
+func httpLog(method, url string) { + slog.Debug("http", + "method", method, + "url", url) +} +func (c *config) httpClient(p *provider) util.Client { hClient := http.Client{} var tlsConfig tls.Config @@ -310,7 +316,10 @@ func (c *config) httpClient(p *provider) util.Client { } if c.Verbose { - client = &util.LoggingClient{Client: client} + client = &util.LoggingClient{ + Client: client, + Log: httpLog, + } } if p.Rate == nil && c.Rate == nil { @@ -331,7 +340,6 @@ func (c *config) httpClient(p *provider) util.Client { } func (c *config) checkProviders() error { - if !c.AllowSingleProvider && len(c.Providers) < 2 { return errors.New("need at least two providers") } @@ -471,7 +479,6 @@ func (c *config) prepareCertificates() error { // prepare prepares internal state of a loaded configuration. func (c *config) prepare() error { - if len(c.Providers) == 0 { return errors.New("no providers given in configuration") } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index c90ef68..e7c5154 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -462,8 +462,9 @@ func (w *worker) extractCategories(label string, advisory any) error { expr := cat[len(exprPrefix):] // Compile first to check that the expression is okay. if _, err := w.expr.Compile(expr); err != nil { - fmt.Printf("Compiling category expression %q failed: %v\n", - expr, err) + slog.Error("Compiling category expression failed", + "expr", expr, + "err", err) continue } // Ignore errors here as they result from not matching. @@ -588,12 +589,10 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) if err := os.MkdirAll(yearDir, 0755); err != nil { return err } - //log.Printf("created %s\n", yearDir) yearDirs[year] = yearDir } fname := filepath.Join(yearDir, filename) - //log.Printf("write: %s\n", fname) data := content.Bytes() if err := writeFileHashes( fname, filename, diff --git a/csaf/advisories.go b/csaf/advisories.go index df23935..ef3fea8 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -9,10 +9,10 @@ package csaf import ( + "context" "encoding/csv" "fmt" "io" - "log" "log/slog" "net/http" "net/url" @@ -91,7 +91,7 @@ func (daf DirectoryAdvisoryFile) LogValue() slog.Value { // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { AgeAccept func(time.Time) bool - Log func(format string, args ...any) + Log func(loglevel slog.Level, format string, args ...any) client util.Client expr *util.PathEval doc any @@ -131,8 +131,8 @@ func (afp *AdvisoryFileProcessor) Process( ) error { lg := afp.Log if lg == nil { - lg = func(format string, args ...any) { - log.Printf("AdvisoryFileProcessor.Process: "+format, args...) + lg = func(loglevel slog.Level, format string, args ...any) { + slog.Log(context.Background(), loglevel, "AdvisoryFileProcessor.Process: "+format, args...) 
} } @@ -140,7 +140,7 @@ func (afp *AdvisoryFileProcessor) Process( rolie, err := afp.expr.Eval( "$.distributions[*].rolie.feeds", afp.doc) if err != nil { - lg("rolie check failed: %v\n", err) + lg(slog.LevelError, "rolie check failed", "err", err) return err } @@ -152,7 +152,7 @@ func (afp *AdvisoryFileProcessor) Process( if err := util.ReMarshalJSON(&feeds, rolie); err != nil { return err } - lg("Found %d ROLIE feed(s).\n", len(feeds)) + lg(slog.LevelInfo, "Found ROLIE feed(s)", "length", len(feeds)) for _, feed := range feeds { if err := afp.processROLIE(feed, fn); err != nil { @@ -168,12 +168,12 @@ func (afp *AdvisoryFileProcessor) Process( var dirURLs []string if err != nil { - lg("extracting directory URLs failed: %v\n", err) + lg(slog.LevelError, "extracting directory URLs failed", "err", err) } else { var ok bool dirURLs, ok = util.AsStrings(directoryURLs) if !ok { - lg("directory_urls are not strings.\n") + lg(slog.LevelError, "directory_urls are not strings") } } @@ -209,9 +209,8 @@ func (afp *AdvisoryFileProcessor) Process( // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( baseURL string, - lg func(string, ...any), + lg func(slog.Level, string, ...any), ) ([]AdvisoryFile, error) { - base, err := url.Parse(baseURL) if err != nil { return nil, err @@ -244,12 +243,12 @@ func (afp *AdvisoryFileProcessor) loadChanges( return nil, err } if len(r) < 2 { - lg("%q has not enough columns in line %d", line) + lg(slog.LevelError, "Not enough columns", "line", line) continue } t, err := time.Parse(time.RFC3339, r[timeColumn]) if err != nil { - lg("%q has an invalid time stamp in line %d: %v", changesURL, line, err) + lg(slog.LevelError, "Invalid time stamp in line", "url", changesURL, "line", line, "err", err) continue } // Apply date range filtering. @@ -258,7 +257,7 @@ func (afp *AdvisoryFileProcessor) loadChanges( } path := r[pathColumn] if _, err := url.Parse(path); err != nil { - lg("%q contains an invalid URL %q in line %d", changesURL, path, line) + lg(slog.LevelError, "Contains an invalid URL", "url", changesURL, "path", path, "line", line) continue } @@ -279,31 +278,31 @@ func (afp *AdvisoryFileProcessor) processROLIE( } up, err := url.Parse(string(*feed.URL)) if err != nil { - log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err) + slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } feedURL := afp.base.ResolveReference(up) - log.Printf("Feed URL: %s\n", feedURL) + slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) if err != nil { - log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err) + slog.Error("Invalid feed base URL", "url", fb, "err", err) continue } feedBaseURL, err := url.Parse(fb) if err != nil { - log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err) + slog.Error("Cannot parse feed base URL", "url", fb, "err", err) continue } res, err := afp.client.Get(feedURL.String()) if err != nil { - log.Printf("error: Cannot get feed '%s'\n", err) + slog.Error("Cannot get feed", "err", err) continue } if res.StatusCode != http.StatusOK { - log.Printf("error: Fetching %s failed. 
Status code %d (%s)", - feedURL, res.StatusCode, res.Status) + slog.Error("Fetching failed", + "url", feedURL, "status_code", res.StatusCode, "status", res.Status) continue } rfeed, err := func() (*ROLIEFeed, error) { @@ -311,7 +310,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( return LoadROLIEFeed(res.Body) }() if err != nil { - log.Printf("Loading ROLIE feed failed: %v.", err) + slog.Error("Loading ROLIE feed failed", "err", err) continue } @@ -323,7 +322,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( } p, err := url.Parse(u) if err != nil { - log.Printf("error: Invalid URL '%s': %v", u, err) + slog.Error("Invalid URL", "url", u, "err", err) return "" } return feedBaseURL.ResolveReference(p).String() diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index 2161079..8d4ee80 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -5,6 +5,7 @@ web = "/var/csaf_aggregator/html" domain = "https://localhost:9443" rate = 10.0 insecure = true +#verbose = false #openpgp_private_key = #openpgp_public_key = #interim_years = From 0848143a0bbcd83cecf626be7d8379759121de53 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 19 Mar 2025 09:39:07 +0100 Subject: [PATCH 110/176] Update lint (#626) * Update linter * Format * Fix lint --- .github/workflows/go.yml | 6 +++--- cmd/csaf_aggregator/client_test.go | 4 ++-- cmd/csaf_downloader/downloader_test.go | 6 ++---- cmd/csaf_downloader/forwarder.go | 6 +++--- cmd/csaf_provider/main.go | 2 +- internal/options/options_test.go | 9 ++++----- util/file_test.go | 2 +- 7 files changed, 16 insertions(+), 19 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 95ee8c7..b86309f 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -17,7 +17,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 'stable' + go-version: "stable" - name: Build run: go build -v ./cmd/... @@ -31,10 +31,10 @@ jobs: gofmt-flags: "-l -d" - name: golint - uses: Jerome1337/golint-action@v1.0.2 + uses: Jerome1337/golint-action@v1.0.3 - name: Revive Action - uses: morphy2k/revive-action@v2.5.1 + uses: morphy2k/revive-action@v2.7.4 - name: Tests run: go test -v ./... 
diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go index fc5b095..3617ce6 100644 --- a/cmd/csaf_aggregator/client_test.go +++ b/cmd/csaf_aggregator/client_test.go @@ -49,10 +49,10 @@ func Test_downloadJSON(t *testing.T) { test := testToRun t.Run(test.name, func(tt *testing.T) { tt.Parallel() - found := func(r io.Reader) error { + found := func(_ io.Reader) error { return nil } - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Add("Content-Type", test.contentType) w.WriteHeader(test.statusCode) })) diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index d7eaae3..1485ec9 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -24,12 +24,10 @@ import ( func checkIfFileExists(path string, t *testing.T) bool { if _, err := os.Stat(path); err == nil { return true - } else if errors.Is(err, os.ErrNotExist) { - return false - } else { + } else if !errors.Is(err, os.ErrNotExist) { t.Fatalf("Failed to check if file exists: %v", err) - return false } + return false } func TestShaMarking(t *testing.T) { diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 1598283..ac2c336 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -224,12 +224,12 @@ func (f *forwarder) storeFailed(filename, doc, sha256, sha512 string) { // limitedString reads max bytes from reader and returns it as a string. // Longer strings are indicated by "..." as a suffix. -func limitedString(r io.Reader, max int) (string, error) { +func limitedString(r io.Reader, maxLength int) (string, error) { var msg strings.Builder - if _, err := io.Copy(&msg, io.LimitReader(r, int64(max))); err != nil { + if _, err := io.Copy(&msg, io.LimitReader(r, int64(maxLength))); err != nil { return "", err } - if msg.Len() >= max { + if msg.Len() >= maxLength { msg.WriteString("...") } return msg.String(), nil diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 6c858c9..3faebfe 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -48,7 +48,7 @@ func main() { cfg, err := loadConfig() if err != nil { - cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) { http.Error(rw, "Something went wrong. Check server logs for more details", http.StatusInternalServerError) })) diff --git a/internal/options/options_test.go b/internal/options/options_test.go index 9aab23b..2768e37 100644 --- a/internal/options/options_test.go +++ b/internal/options/options_test.go @@ -37,10 +37,10 @@ func TestParse(t *testing.T) { }, Usage: "[OPTIONS] domain...", HasVersion: func(cfg *config) bool { return cfg.Version }, - SetDefaults: func(cfg *config) { + SetDefaults: func(_ *config) { }, // Re-establish default values if not set. 
- EnsureDefaults: func(cfg *config) { + EnsureDefaults: func(_ *config) { }, } @@ -157,7 +157,6 @@ func TestErrorCheck(t *testing.T) { return } t.Fatalf("process ran with err %v, want exit status 1", err) - } // TestSecondPassCommandlineParsing checks if the second pass @@ -168,7 +167,7 @@ func TestSecondPassCommandlineParsing(t *testing.T) { os.Args = []string{"cmd"} p := Parser[config]{ - ConfigLocation: func(cfg *config) string { + ConfigLocation: func(_ *config) string { // This is a bit stupid. os.Args = []string{"cmd", "--invalid"} return "data/empty.toml" @@ -188,7 +187,7 @@ func TestSecondPassCommandlineHelp(t *testing.T) { os.Args = []string{"cmd"} p := Parser[config]{ - ConfigLocation: func(cfg *config) string { + ConfigLocation: func(_ *config) string { // This is a bit stupid. os.Args = []string{"cmd", "--help"} return "data/empty.toml" diff --git a/util/file_test.go b/util/file_test.go index 28c5196..ab2a208 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -155,7 +155,7 @@ func TestMakeUniqFile(t *testing.T) { func Test_mkUniq(t *testing.T) { dir := t.TempDir() - name, err := mkUniq(dir+"/", func(name string) error { + name, err := mkUniq(dir+"/", func(_ string) error { return nil }) if err != nil { From 2c5ef1fd5f47a8c9ad34526a5eef64a2c8b28f9f Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 24 Mar 2025 13:32:43 +0100 Subject: [PATCH 111/176] Avoid memory leak Move `resp.Body.Close()` before check of status code. Reported by @mgoetzegb here: https://github.com/gocsaf/csaf/pull/625#issuecomment-2744067770 --- cmd/csaf_downloader/downloader.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index f0778ee..bcef357 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -781,11 +781,11 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, if err != nil { return nil, nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, nil, fmt.Errorf( "fetching signature from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode) } - defer resp.Body.Close() data, err := io.ReadAll(resp.Body) if err != nil { return nil, nil, err @@ -846,11 +846,11 @@ func loadHash(client util.Client, p string) ([]byte, []byte, error) { if err != nil { return nil, nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, nil, fmt.Errorf( "fetching hash from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode) } - defer resp.Body.Close() var data bytes.Buffer tee := io.TeeReader(resp.Body, &data) hash, err := util.HashFromReader(tee) From 2f599ab0175d0d89748f4d539afdc51024332b97 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 2 Apr 2025 17:05:29 +0200 Subject: [PATCH 112/176] Fix aggregator URL handling (#631) * Fix aggregator URL handling Parts of the URL were not path escaped. This results in a wrong URL; if the provider name contains characters that need to be escaped. 
* Simplify JoinPath usage --- cmd/csaf_aggregator/indices.go | 68 +++++++++++++++++++++----------- cmd/csaf_aggregator/mirror.go | 32 +++++++++------ cmd/csaf_aggregator/processor.go | 13 ++++++ 3 files changed, 78 insertions(+), 35 deletions(-) diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 17c8d3a..976d9a3 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -183,19 +183,26 @@ func (w *worker) writeROLIENoSummaries(label string) error { fname := "csaf-feed-tlp-" + labelFolder + ".json" - feedURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + labelFolder + "/" + fname + feedURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + feedURL = feedURL.JoinPath(labelFolder, fname) links := []csaf.Link{{ Rel: "self", - HRef: feedURL, + HRef: feedURL.String(), }} if w.provider.serviceDocument(w.processor.cfg) { + serviceURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + serviceURL = serviceURL.JoinPath("service.json") links = append(links, csaf.Link{ - Rel: "service", - HRef: w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/service.json", + Rel: "service", + HRef: serviceURL.String(), }) } @@ -223,8 +230,11 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { fname := "csaf-feed-tlp-" + labelFolder + ".json" - feedURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + labelFolder + "/" + fname + feedURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + feedURL = feedURL.JoinPath(labelFolder, fname) entries := make([]*csaf.Entry, len(summaries)) @@ -236,10 +246,13 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { for i := range summaries { s := &summaries[i] - csafURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + label + "/" + - strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + - s.filename + csafURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + csafURLString := csafURL.JoinPath(label, + strconv.Itoa(s.summary.InitialReleaseDate.Year()), + s.filename).String() entries[i] = &csaf.Entry{ ID: s.summary.ID, @@ -247,15 +260,15 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { Published: csaf.TimeStamp(s.summary.InitialReleaseDate), Updated: csaf.TimeStamp(s.summary.CurrentReleaseDate), Link: []csaf.Link{ - {Rel: "self", HRef: csafURL}, - {Rel: "hash", HRef: csafURL + ".sha256"}, - {Rel: "hash", HRef: csafURL + ".sha512"}, - {Rel: "signature", HRef: csafURL + ".asc"}, + {Rel: "self", HRef: csafURLString}, + {Rel: "hash", HRef: csafURLString + ".sha256"}, + {Rel: "hash", HRef: csafURLString + ".sha512"}, + {Rel: "signature", HRef: csafURLString + ".asc"}, }, Format: format, Content: csaf.Content{ Type: "application/json", - Src: csafURL, + Src: csafURLString, }, } if s.summary.Summary != "" { @@ -267,14 +280,18 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { links := []csaf.Link{{ Rel: "self", - HRef: feedURL, + HRef: feedURL.String(), }} if w.provider.serviceDocument(w.processor.cfg) { + serviceURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + serviceURL = serviceURL.JoinPath("service.json") links = append(links, csaf.Link{ - Rel: "service", - HRef: w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/service.json", + Rel: "service", + HRef: serviceURL.String(), 
}) } @@ -344,12 +361,15 @@ func (w *worker) writeService() error { for _, ts := range labels { feedName := "csaf-feed-tlp-" + ts + ".json" - href := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + ts + "/" + feedName + hrefURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + hrefURL = hrefURL.JoinPath(ts, feedName) collection := csaf.ROLIEServiceWorkspaceCollection{ Title: "CSAF feed (TLP:" + strings.ToUpper(ts) + ")", - HRef: href, + HRef: hrefURL.String(), Categories: categories, } collections = append(collections, collection) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index e7c5154..1ef5881 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -103,9 +103,13 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { } // Add us as a mirror. + mirror, err := w.getProviderBaseURL() + if err != nil { + return nil, err + } mirrorURL := csaf.ProviderURL( - fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/provider-metadata.json", - w.processor.cfg.Domain, w.provider.Name)) + mirror.JoinPath("provider-metadata.json").String(), + ) acp.Mirrors = []csaf.ProviderURL{ mirrorURL, @@ -128,8 +132,12 @@ func (w *worker) writeProviderMetadata() error { fname := filepath.Join(w.dir, "provider-metadata.json") + prefixURL, err := w.getProviderBaseURL() + if err != nil { + return err + } pm := csaf.NewProviderMetadataPrefix( - w.processor.cfg.Domain+"/.well-known/csaf-aggregator/"+w.provider.Name, + prefixURL.String(), w.labelsFromSummaries()) // Fill in directory URLs if needed. @@ -139,9 +147,8 @@ func (w *worker) writeProviderMetadata() error { labels = append(labels, label) } sort.Strings(labels) - prefix := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + w.provider.Name + "/" for _, label := range labels { - pm.AddDirectoryDistribution(prefix + label) + pm.AddDirectoryDistribution(prefixURL.JoinPath(label).String()) } } @@ -188,9 +195,12 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { return err } + keyURL, err := w.getProviderBaseURL() + if err != nil { + return err + } localKeyURL := func(fingerprint string) string { - return fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/openpgp/%s.asc", - w.processor.cfg.Domain, w.provider.Name, fingerprint) + return keyURL.JoinPath("openpgp", (fingerprint + ".asc")).String() } for i := range pm.PGPKeys { @@ -240,8 +250,8 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { } // replace the URL - url := localKeyURL(fingerprint) - pgpKey.URL = &url + u := localKeyURL(fingerprint) + pgpKey.URL = &u } // If we have public key configured copy it into the new folder @@ -308,7 +318,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error var ( lastUpdated = csaf.TimeStamp(lastUpdatedT) role = csaf.MetadataRole(roleS) - url = csaf.ProviderURL(urlS) + providerURL = csaf.ProviderURL(urlS) ) return &csaf.AggregatorCSAFProvider{ @@ -316,7 +326,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error LastUpdated: &lastUpdated, Publisher: &pub, Role: &role, - URL: &url, + URL: &providerURL, }, }, nil } diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index b22e839..0d41df8 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -11,6 +11,7 @@ package main import ( "fmt" "log/slog" + "net/url" "os" "path/filepath" @@ -112,6 +113,18 @@ func (w *worker) locateProviderMetadata(domain 
string) error { return nil } +// getProviderBaseURL returns the base URL for the provider. +func (w *worker) getProviderBaseURL() (*url.URL, error) { + baseURL, err := url.Parse(w.processor.cfg.Domain) + if err != nil { + return nil, err + } + baseURL = baseURL.JoinPath(".well-known", + "csaf-aggregator", + w.provider.Name) + return baseURL, nil +} + // removeOrphans removes the directories that are not in the providers list. func (p *processor) removeOrphans() error { From 91b5b4543e6577770cf68ad43cab7fc8f331ff05 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 3 Apr 2025 14:41:14 +0200 Subject: [PATCH 113/176] Check if canonical url prefix is valid --- cmd/csaf_provider/config.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index 826b7bf..5d29b61 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -11,6 +11,7 @@ package main import ( "fmt" "io" + "net/url" "os" "strings" @@ -262,6 +263,14 @@ func loadConfig() (*config, error) { if cfg.CanonicalURLPrefix == "" { cfg.CanonicalURLPrefix = "https://" + os.Getenv("SERVER_NAME") } + // Check if canonical url prefix is invalid + parsedURL, err := url.ParseRequestURI(cfg.CanonicalURLPrefix) + if err != nil { + return nil, err + } + if parsedURL.Scheme != "https" && parsedURL.Scheme != "http" { + return nil, fmt.Errorf("invalid canonical URL: %q", cfg.CanonicalURLPrefix) + } if cfg.TLPs == nil { cfg.TLPs = []tlp{tlpCSAF, tlpWhite, tlpGreen, tlpAmber, tlpRed} From 3ab00e87594ccad74c40534bbad3f4028abdb5f3 Mon Sep 17 00:00:00 2001 From: Christoph Klassen <100708552+cintek@users.noreply.github.com> Date: Wed, 28 May 2025 11:30:46 +0200 Subject: [PATCH 114/176] Remove golint github action We use Revive already which is a replacement for golint and golint isn't maintained anyway. --- .github/workflows/go.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b86309f..6b07bfd 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -30,9 +30,6 @@ jobs: with: gofmt-flags: "-l -d" - - name: golint - uses: Jerome1337/golint-action@v1.0.3 - - name: Revive Action uses: morphy2k/revive-action@v2.7.4 From fc64bf71650ed878452079c34bab5b78728e409a Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 12 Jun 2025 15:47:24 +0200 Subject: [PATCH 115/176] Upgrade jsonschema to v6 --- csaf/validation.go | 64 +++++++++++++++++++------- go.mod | 32 ++++++------- go.sum | 110 ++++++++++++++++++++++++++++----------------- 3 files changed, 135 insertions(+), 71 deletions(-) diff --git a/csaf/validation.go b/csaf/validation.go index 73e732c..3faf549 100644 --- a/csaf/validation.go +++ b/csaf/validation.go @@ -10,13 +10,17 @@ package csaf import ( "bytes" + "crypto/tls" _ "embed" // Used for embedding. - "io" + "errors" + "fmt" + "net/http" "sort" "strings" "sync" + "time" - "github.com/santhosh-tekuri/jsonschema/v5" + "github.com/santhosh-tekuri/jsonschema/v6" ) //go:embed schema/csaf_json_schema.json @@ -64,13 +68,29 @@ var ( compiledRolieSchema = compiledSchema{url: rolieSchemaURL} ) -// loadURL loads the content of an URL from embedded data or -// falls back to the global loader function of the jsonschema package. 
-func loadURL(s string) (io.ReadCloser, error) { - loader := func(data []byte) (io.ReadCloser, error) { - return io.NopCloser(bytes.NewReader(data)), nil +type schemaLoader http.Client + +func (l *schemaLoader) loadHTTPURL(url string) (any, error) { + client := (*http.Client)(l) + resp, err := client.Get(url) + if err != nil { + return nil, err } - switch s { + if resp.StatusCode != http.StatusOK { + _ = resp.Body.Close() + return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode) + } + defer resp.Body.Close() + + return jsonschema.UnmarshalJSON(resp.Body) +} + +// Load loads the schema from the specified url. +func (l *schemaLoader) Load(url string) (any, error) { + loader := func(data []byte) (any, error) { + return jsonschema.UnmarshalJSON(bytes.NewReader(data)) + } + switch url { case csafSchemaURL: return loader(csafSchema) case cvss20SchemaURL: @@ -86,14 +106,27 @@ func loadURL(s string) (io.ReadCloser, error) { case rolieSchemaURL: return loader(rolieSchema) default: - return jsonschema.LoadURL(s) + // Fallback to http loader + return l.loadHTTPURL(url) } } +func newSchemaLoader(insecure bool) *schemaLoader { + httpLoader := schemaLoader(http.Client{ + Timeout: 15 * time.Second, + }) + if insecure { + httpLoader.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + } + return &httpLoader +} + func (cs *compiledSchema) compile() { c := jsonschema.NewCompiler() - c.AssertFormat = true - c.LoadURL = loadURL + c.AssertFormat() + c.UseLoader(newSchemaLoader(false)) cs.compiled, cs.err = c.Compile(cs.url) } @@ -109,7 +142,8 @@ func (cs *compiledSchema) validate(doc any) ([]string, error) { return nil, nil } - valErr, ok := err.(*jsonschema.ValidationError) + var valErr *jsonschema.ValidationError + ok := errors.As(err, &valErr) if !ok { return nil, err } @@ -133,21 +167,21 @@ func (cs *compiledSchema) validate(doc any) ([]string, error) { if pi != pj { return pi < pj } - return errs[i].Error < errs[j].Error + return errs[i].Error.String() < errs[j].Error.String() }) res := make([]string, 0, len(errs)) for i := range errs { e := &errs[i] - if e.Error == "" { + if e.Error == nil { continue } loc := e.InstanceLocation if loc == "" { loc = e.AbsoluteKeywordLocation } - res = append(res, loc+": "+e.Error) + res = append(res, loc+": "+e.Error.String()) } return res, nil diff --git a/go.mod b/go.mod index 1ef2216..5a27126 100644 --- a/go.mod +++ b/go.mod @@ -1,31 +1,33 @@ module github.com/gocsaf/csaf/v3 -go 1.22.9 +go 1.23.0 + +toolchain go1.24.4 require ( - github.com/BurntSushi/toml v1.4.0 + github.com/BurntSushi/toml v1.5.0 github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 - github.com/ProtonMail/gopenpgp/v2 v2.8.0 - github.com/PuerkitoBio/goquery v1.8.1 + github.com/ProtonMail/gopenpgp/v2 v2.9.0 + github.com/PuerkitoBio/goquery v1.10.3 github.com/gofrs/flock v0.12.1 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 - github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 - go.etcd.io/bbolt v1.3.11 - golang.org/x/crypto v0.29.0 - golang.org/x/term v0.26.0 - golang.org/x/time v0.8.0 + github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 + go.etcd.io/bbolt v1.4.1 + golang.org/x/crypto v0.39.0 + golang.org/x/term v0.32.0 + golang.org/x/time v0.12.0 ) require ( - github.com/ProtonMail/go-crypto v1.1.2 // indirect + github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect - github.com/andybalholm/cascadia v1.3.2 // indirect - 
github.com/cloudflare/circl v1.5.0 // indirect + github.com/andybalholm/cascadia v1.3.3 // indirect + github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.31.0 // indirect - golang.org/x/sys v0.27.0 // indirect - golang.org/x/text v0.20.0 // indirect + golang.org/x/net v0.41.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.26.0 // indirect ) diff --git a/go.sum b/go.sum index 47637e9..1f5b5b4 100644 --- a/go.sum +++ b/go.sum @@ -1,28 +1,30 @@ -github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= -github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/Intevation/gval v1.3.0 h1:+Ze5sft5MmGbZrHj06NVUbcxCb67l9RaPTLMNr37mjw= github.com/Intevation/gval v1.3.0/go.mod h1:xmGyGpP5be12EL0P12h+dqiYG8qn2j3PJxIgkoOHO5o= github.com/Intevation/jsonpath v0.2.1 h1:rINNQJ0Pts5XTFEG+zamtdL7l9uuE1z0FBA+r55Sw+A= github.com/Intevation/jsonpath v0.2.1/go.mod h1:WnZ8weMmwAx/fAO3SutjYFU+v7DFreNYnibV7CiaYIw= -github.com/ProtonMail/go-crypto v1.1.2 h1:A7JbD57ThNqh7XjmHE+PXpQ3Dqt3BrSAC0AL0Go3KS0= -github.com/ProtonMail/go-crypto v1.1.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= +github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= -github.com/ProtonMail/gopenpgp/v2 v2.8.0 h1:WvMv3CMcFsqKSM4/Qf8sf3tgyQkzDqQmoSE49bnBuP4= -github.com/ProtonMail/gopenpgp/v2 v2.8.0/go.mod h1:qb2GUSnmA9ipBW5GVtCtEhkummSlqs2A8Ar3S0HBgSY= -github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= -github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= -github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= -github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= -github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= -github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= -github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/ProtonMail/gopenpgp/v2 v2.9.0 h1:ruLzBmwe4dR1hdnrsEJ/S7psSBmV15gFttFUPP/+/kE= +github.com/ProtonMail/gopenpgp/v2 v2.9.0/go.mod h1:IldDyh9Hv1ZCCYatTuuEt1XZJ0OPjxLpTarDfglih7s= +github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo= +github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y= +github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= +github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -31,67 +33,93 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= -go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= +go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= +go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= -golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.39.0 
h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= -golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= -golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= -golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= -golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= +golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= -golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= -golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= -golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod 
h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= From 6955c4e37c0462d8cc810e31a3b15e5d6a57b77d Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Jun 2025 10:19:21 +0200 Subject: [PATCH 116/176] Upgrade node.js and format workflow file --- .github/workflows/itest.yml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 8bc87d5..a99c269 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -5,16 +5,15 @@ jobs: build: runs-on: ubuntu-latest steps: - - name: Set up Go uses: actions/setup-go@v5 with: - go-version: '^1.23.6' + go-version: "^1.23.6" - name: Set up Node.js uses: actions/setup-node@v3 with: - node-version: 16 + node-version: 24 - name: Checkout uses: actions/checkout@v4 @@ -38,8 +37,8 @@ jobs: - name: Upload test results uses: actions/upload-artifact@v4 with: - name: checker-results - path: | - ~/checker-results.html - ~/checker-results-no-clientcert.json - if-no-files-found: error + name: checker-results + path: | + ~/checker-results.html + ~/checker-results-no-clientcert.json + if-no-files-found: error From 34705f3c6e3dcc73b6708e01b91b7e47980bcc52 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Jun 2025 11:00:22 +0200 Subject: [PATCH 117/176] Address comments --- csaf/validation.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/csaf/validation.go b/csaf/validation.go index 3faf549..598d0fa 100644 --- a/csaf/validation.go +++ b/csaf/validation.go @@ -76,11 +76,10 @@ func (l *schemaLoader) loadHTTPURL(url string) (any, error) { if err != nil { return nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - _ = resp.Body.Close() return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode) } - defer resp.Body.Close() return jsonschema.UnmarshalJSON(resp.Body) } From dcdbc5d49d951ac677a1e39039c3506aaf65304c Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Jun 2025 22:49:11 +0200 Subject: [PATCH 118/176] Add semver breaking changes detection --- .github/workflows/go.yml | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b86309f..bed2620 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -12,7 +12,8 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Checkout + uses: actions/checkout@v4 - name: Set up Go uses: actions/setup-go@v4 @@ -38,3 +39,27 @@ jobs: - name: Tests run: go test -v ./... 
+ + run_modver: + runs-on: ubuntu-latest + needs: build # Only run when build job was successful + if: ${{ github.event_name == 'pull_request' && success() }} + permissions: + contents: read # Modver needs to read the repo content + pull-requests: write # Modver needs to write comments/status on PRs + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Modver needs full history for comparison + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: "stable" + + - name: Modver + uses: bobg/modver@v2.5.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} From cb291bb81b5cd562e906e69b403421e99a978534 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 19 Jun 2025 12:03:52 +0200 Subject: [PATCH 119/176] Update modver --- .github/workflows/go.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 7f21af0..b3f5389 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -56,7 +56,7 @@ jobs: go-version: "stable" - name: Modver - uses: bobg/modver@v2.5.0 + uses: bobg/modver@v2.11.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} From 6ac97810d0337b385633a2a1c8a8f80c6a71b478 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 19 Jun 2025 15:11:45 +0200 Subject: [PATCH 120/176] Use JoinPath This avoids issues where parts of the URL are discarded. --- cmd/csaf_checker/links.go | 3 ++- cmd/csaf_checker/processor.go | 11 ++++++----- cmd/csaf_checker/roliecheck.go | 7 ++++--- cmd/csaf_downloader/downloader.go | 2 +- csaf/advisories.go | 5 +++-- internal/misc/url.go | 21 +++++++++++++++++++++ 6 files changed, 37 insertions(+), 12 deletions(-) create mode 100644 internal/misc/url.go diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index a323661..c7aec57 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -9,6 +9,7 @@ package main import ( + "github.com/gocsaf/csaf/v3/internal/misc" "io" "net/http" "net/url" @@ -93,7 +94,7 @@ func (pgs pages) listed( return err } // Links may be relative - abs := baseURL.ResolveReference(u).String() + abs := misc.JoinURL(baseURL, u).String() content.links.Add(abs) return nil }) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index ae79133..c0c4437 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -18,6 +18,7 @@ import ( "encoding/json" "errors" "fmt" + "github.com/gocsaf/csaf/v3/internal/misc" "io" "log" "net/http" @@ -644,7 +645,7 @@ func (p *processor) integrity( } fp = makeAbs(fp) - u := b.ResolveReference(fp).String() + u := misc.JoinURL(b, fp).String() // Should this URL be ignored? 
if p.cfg.ignoreURL(u) { @@ -777,7 +778,7 @@ func (p *processor) integrity( continue } hu = makeAbs(hu) - hashFile := b.ResolveReference(hu).String() + hashFile := misc.JoinURL(b, hu).String() p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { @@ -827,7 +828,7 @@ func (p *processor) integrity( continue } su = makeAbs(su) - sigFile := b.ResolveReference(su).String() + sigFile := misc.JoinURL(b, su).String() p.checkTLS(sigFile) p.badSignatures.use() @@ -1374,7 +1375,7 @@ func (p *processor) checkSecurityFolder(folder string) string { return err.Error() } - u = base.ResolveReference(up).String() + u = misc.JoinURL(base, up).String() p.checkTLS(u) if res, err = client.Get(u); err != nil { return fmt.Sprintf("Cannot fetch %s from security.txt: %v", u, err) @@ -1539,7 +1540,7 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - u := base.ResolveReference(up).String() + u := misc.JoinURL(base, up).String() p.checkTLS(u) res, err := client.Get(u) diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 28bd437..0a9ff04 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -10,6 +10,7 @@ package main import ( "errors" + "github.com/gocsaf/csaf/v3/internal/misc" "net/http" "net/url" "sort" @@ -237,7 +238,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedBase := base.ResolveReference(up) + feedBase := misc.JoinURL(base, up) feedURL := feedBase.String() p.checkTLS(feedURL) @@ -270,7 +271,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - feedURL := base.ResolveReference(up) + feedURL := misc.JoinURL(base, up) feedBase, err := util.BaseURL(feedURL) if err != nil { p.badProviderMetadata.error("Bad base path: %v", err) @@ -325,7 +326,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - feedBase := base.ResolveReference(up) + feedBase := misc.JoinURL(base, up) makeAbs := makeAbsolute(feedBase) label := defaults(feed.TLPLabel, csaf.TLPLabelUnlabeled) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index bcef357..90e3ac3 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -343,7 +343,7 @@ func (d *downloader) loadOpenPGPKeys( continue } - u := base.ResolveReference(up).String() + u := base.JoinPath(up.Path).String() res, err := client.Get(u) if err != nil { diff --git a/csaf/advisories.go b/csaf/advisories.go index ef3fea8..e7bc11a 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -12,6 +12,7 @@ import ( "context" "encoding/csv" "fmt" + "github.com/gocsaf/csaf/v3/internal/misc" "io" "log/slog" "net/http" @@ -281,7 +282,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } - feedURL := afp.base.ResolveReference(up) + feedURL := misc.JoinURL(afp.base, up) slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) @@ -325,7 +326,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid URL", "url", u, "err", err) return "" } - return feedBaseURL.ResolveReference(p).String() + return misc.JoinURL(feedBaseURL, p).String() } rfeed.Entries(func(entry *Entry) { diff --git a/internal/misc/url.go b/internal/misc/url.go new file mode 100644 index 0000000..2256a94 --- /dev/null +++ b/internal/misc/url.go @@ -0,0 +1,21 @@ +// This file is Free Software under the 
Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2025 German Federal Office for Information Security (BSI) +// Software-Engineering: 2025 Intevation GmbH + +package misc + +import "net/url" + +// JoinURL joins the two URLs while preserving the query and fragment part of the latter. +func JoinURL(baseURL *url.URL, relativeURL *url.URL) *url.URL { + u := baseURL.JoinPath(relativeURL.Path) + u.RawQuery = relativeURL.RawQuery + u.RawFragment = relativeURL.RawFragment + // Enforce https, this is required if the base url was only a domain + u.Scheme = "https" + return u +} From 091854a2480e92e705a67f03c7ad621270216439 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 19 Jun 2025 11:39:54 +0200 Subject: [PATCH 121/176] Always generate report Closes #385 --- cmd/csaf_checker/processor.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index ae79133..a574a5d 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -253,12 +253,10 @@ func (p *processor) run(domains []string) (*Report, error) { if !p.checkProviderMetadata(d) { // We cannot build a report if the provider metadata cannot be parsed. log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) - continue } if err := p.checkDomain(d); err != nil { log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. "+ "Continuing with next domain.", d, err) - continue } domain := &Domain{Name: d} @@ -1431,7 +1429,6 @@ func (p *processor) checkDNS(domain string) { // checkWellknown checks if the provider-metadata.json file is // available under the /.well-known/csaf/ directory. 
func (p *processor) checkWellknown(domain string) { - p.badWellknownMetadata.use() client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" From 1098c6add07755d8f628edd90d3d5bc67796f812 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 20 Jun 2025 14:46:26 +0200 Subject: [PATCH 122/176] Use correct base URL --- cmd/csaf_checker/processor.go | 5 +---- cmd/csaf_checker/roliecheck.go | 3 ++- cmd/csaf_downloader/downloader.go | 1 + csaf/advisories.go | 1 + 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index c0c4437..bfaf9e1 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -632,7 +632,6 @@ func (p *processor) integrity( if err != nil { return err } - makeAbs := makeAbsolute(b) client := p.httpClient() var data bytes.Buffer @@ -643,7 +642,6 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", f, err) continue } - fp = makeAbs(fp) u := misc.JoinURL(b, fp).String() @@ -777,7 +775,6 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", x.url(), err) continue } - hu = makeAbs(hu) hashFile := misc.JoinURL(b, hu).String() p.checkTLS(hashFile) @@ -827,7 +824,6 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", f.SignURL(), err) continue } - su = makeAbs(su) sigFile := misc.JoinURL(b, su).String() p.checkTLS(sigFile) @@ -1527,6 +1523,7 @@ func (p *processor) checkPGPKeys(_ string) error { if err != nil { return err } + base.Path = "" for i := range keys { key := &keys[i] diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 0a9ff04..ace4d0d 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -222,6 +222,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { if err != nil { return err } + base.Path = "" p.badROLIEFeed.use() advisories := map[*csaf.Feed][]csaf.AdvisoryFile{} @@ -291,7 +292,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { // TODO: Issue a warning if we want check AMBER+ without an // authorizing client. 
- if err := p.integrity(files, feedBase, rolieMask, p.badProviderMetadata.add); err != nil { + if err := p.integrity(files, base.String(), rolieMask, p.badProviderMetadata.add); err != nil { if err != errContinue { return err } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 90e3ac3..2b08544 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -229,6 +229,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } + base.Path = "" expr := util.NewPathEval() diff --git a/csaf/advisories.go b/csaf/advisories.go index e7bc11a..c5e4fea 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -295,6 +295,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Cannot parse feed base URL", "url", fb, "err", err) continue } + feedBaseURL.Path = "" res, err := afp.client.Get(feedURL.String()) if err != nil { From 36aab33de4ecfb1107e3174849ff9c750c84b8a0 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 20 Jun 2025 16:50:13 +0200 Subject: [PATCH 123/176] Use folder name as version if git describe failed --- Makefile | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 163ace5..0ae02b0 100644 --- a/Makefile +++ b/Makefile @@ -47,13 +47,18 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always) +GITDESC := $(shell git describe --tags --always 2>/dev/null || true) +CURRENT_FOLDER_NAME := $(notdir $(CURDIR)) +ifeq ($(strip $(GITDESC)),) +SEMVER := $(CURRENT_FOLDER_NAME) +else GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) # Hint: The second regexp in the next line only matches # if there is a hyphen (`-`) followed by a number, # by which we assume that git describe has added a string after the tag SEMVER := $(shell echo '$(GITDESC)' | sed -E -e 's/^v//' -e 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +endif testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From 9c62e89a23d71d9f9e3cdd24940c3a0c300ac33c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 24 Jun 2025 14:34:44 +0200 Subject: [PATCH 124/176] Feat: More explicitely handle which doc files are included in the gnulinux dist --- Makefile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 0ae02b0..f399bf5 100644 --- a/Makefile +++ b/Makefile @@ -103,7 +103,13 @@ dist: build_linux build_win build_mac_amd64 build_mac_arm64 cp docs/$${f}.md dist/$(DISTDIR)-macos/docs ; \ done mkdir dist/$(DISTDIR)-gnulinux-amd64 - cp -r README.md docs bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 + cp -r README.md bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 + # adjust which docs to copy + mkdir -p dist/tmp_docs + cp -r docs/examples dist/tmp_docs + cp docs/*.md dist/tmp_docs + cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-amd64/docs + rm -rf dist/tmp_docs cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-amd64.tar.gz $(DISTDIR)-gnulinux-amd64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-macos.tar.gz $(DISTDIR)-macos From 02d49311526b5ae27226e59487fa0350f25f4359 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 
24 Jun 2025 17:06:55 +0200 Subject: [PATCH 125/176] Fix: Return properly early --- cmd/csaf_checker/processor.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index a574a5d..1110af1 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1437,6 +1437,7 @@ func (p *processor) checkWellknown(domain string) { if err != nil { p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed: %v", path, err)) + return } if res.StatusCode != http.StatusOK { p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", From 3f4fe5cf185b73271be7d706e92b065ccfd54703 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 24 Jun 2025 17:18:16 +0200 Subject: [PATCH 126/176] Also generate report when role is not available --- cmd/csaf_checker/processor.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 1110af1..7db2364 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -255,8 +255,7 @@ func (p *processor) run(domains []string) (*Report, error) { log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) } if err := p.checkDomain(d); err != nil { - log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. "+ - "Continuing with next domain.", d, err) + log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) } domain := &Domain{Name: d} @@ -267,8 +266,10 @@ func (p *processor) run(domains []string) (*Report, error) { } if domain.Role == nil { - log.Printf("No role found in meta data. Ignoring domain %q\n", d) - continue + log.Printf("No role found in meta data for domain %q\n", d) + // Assume provider to continue report generation + role := csaf.MetadataRolePublisher + domain.Role = &role } rules := roleRequirements(*domain.Role) From d09db6635da6e753940341513a2d1ae610bf0f49 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 24 Jun 2025 17:24:08 +0200 Subject: [PATCH 127/176] Fix: Assume most restrictive role to prevent false-positives --- cmd/csaf_checker/processor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7db2364..f977092 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -267,8 +267,8 @@ func (p *processor) run(domains []string) (*Report, error) { if domain.Role == nil { log.Printf("No role found in meta data for domain %q\n", d) - // Assume provider to continue report generation - role := csaf.MetadataRolePublisher + // Assume trusted provider to continue report generation + role := csaf.MetadataRoleTrustedProvider domain.Role = &role } From 5d37dd1339d394fe1cc1111f369a670b9e6a61ec Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 25 Jun 2025 09:27:12 +0200 Subject: [PATCH 128/176] Move PMD error from logs to report. --- cmd/csaf_checker/processor.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index f977092..ef273d0 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -251,11 +251,16 @@ func (p *processor) run(domains []string) (*Report, error) { p.reset() if !p.checkProviderMetadata(d) { - // We cannot build a report if the provider metadata cannot be parsed. 
- log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) + // We need to fail the domain if the PMD cannot be parsed. + p.badProviderMetadata.use() + message := fmt.Sprintf("Could not parse the Provider-Metadata.json of: %s", d) + p.badProviderMetadata.error(message) + } if err := p.checkDomain(d); err != nil { - log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) + p.badProviderMetadata.use() + message := fmt.Sprintf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) + p.badProviderMetadata.error(message) } domain := &Domain{Name: d} From d54e211ef3098e4dd74dc0ff85e8f3324760e4c9 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 27 Jun 2025 09:49:32 +0200 Subject: [PATCH 129/176] docs: improve README.md * De-emphasize the old repo link alert. * Add more hints about officially unsupported but possible use as library. Solves #634 --- README.md | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index b76bf95..ccb8d67 100644 --- a/README.md +++ b/README.md @@ -9,14 +9,6 @@ --> -> [!IMPORTANT] -> To avoid future breakage, if you still use `csaf-poc`: -> 1. Adjust your HTML links. -> 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). -> -> (This repository was moved here on 2024-10-28. The old one is deprecated -> and redirection will be switched off a few months later.) - # csaf @@ -49,13 +41,22 @@ is a tool for testing a CSAF Trusted Provider according to [Section 7 of the CSA ### [csaf_aggregator](docs/csaf_aggregator.md) is a CSAF Aggregator, to list or mirror providers. -## Other stuff + +## Use as go library + +The modules of this repository can be used as library from other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. +But there is only limited support, and thus _not officially supported_. +There are plans to change this without timeline, with a future major release, +e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). + +Initially envisioned as toolbox, it was not constructed as a library, +and to name one issue, exposes to many functions. +This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change, +that we now have to live with. ### [examples](./examples/README.md) -are small examples of how to use `github.com/gocsaf/csaf` -as an API. Currently this is a work in progress, as usage of this repository -as a library to access is _not officially supported_, e.g. -see https://github.com/gocsaf/csaf/issues/367 . +are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress. + ## Setup Binaries for the server side are only available and tested @@ -107,6 +108,14 @@ Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-wi For further details of the development process consult our [development page](./docs/Development.md). +## Previous repo URLs + +> [!NOTE] +> To avoid future breakage, if you have `csaf-poc` in some of your URLs: +> 1. Adjust your HTML links. +> 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). +> +> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off somtimes in 2025.)
## License From a6d0a0c790644362cb128f473e42c10b8e993bf5 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 27 Jun 2025 10:20:56 +0200 Subject: [PATCH 130/176] docs: extend package csaf doc comment * fix sentence. * add link to the section in the top-level readme that has the limits on the use as a library. --- csaf/doc.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/csaf/doc.go b/csaf/doc.go index f1e092c..233bda6 100644 --- a/csaf/doc.go +++ b/csaf/doc.go @@ -6,7 +6,11 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -// Package csaf contains the core data models used by the csaf distribution. +// Package csaf contains the core data models used by the csaf distribution +// tools. +// +// See https://github.com/gocsaf/csaf/tab=readme-ov-file#use-as-go-library +// about hints and limits for its use as a library. package csaf //go:generate go run ./generate_cvss_enums.go -o cvss20enums.go -i ./schema/cvss-v2.0.json -p CVSS20 From 7b7d0c4dcb035d1edd8684d115abd246684e9e60 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 27 Jun 2025 10:24:48 +0200 Subject: [PATCH 131/176] improve phrasing --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ccb8d67..094412f 100644 --- a/README.md +++ b/README.md @@ -45,9 +45,8 @@ is a CSAF Aggregator, to list or mirror providers. ## Use as go library The modules of this repository can be used as library from other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. -But there is only limited support, and thus _not officially supported_. -There are plans to change this without timeline, with a future major release, -e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). +But there is only limited support, and thus it is _not officially supported_. +There are plans to change this without concrete schedule, with a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). Initially envisioned as toolbox, it was not constructed as a library, and to name one issue, exposes to many functions. From a7b1291be858edd0d555bd7026cd6e2ba050eba5 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 27 Jun 2025 17:20:19 +0200 Subject: [PATCH 132/176] Print warning if no config file was found --- internal/options/options.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/options/options.go b/internal/options/options.go index 3a4867f..38b5bd4 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -46,7 +46,6 @@ type Parser[C any] struct { // If a config file was specified it is loaded. // Returns the arguments and the configuration. func (p *Parser[C]) Parse() ([]string, *C, error) { - var cmdLineOpts C if p.SetDefaults != nil { p.SetDefaults(&cmdLineOpts) @@ -82,6 +81,7 @@ func (p *Parser[C]) Parse() ([]string, *C, error) { // No config file -> We are good. if path == "" { + slog.Warn("No config file found. 
Maybe you want to specify one or store it in a respective default location", "locations", p.DefaultConfigLocations) return args, &cmdLineOpts, nil } From 27e9519ed56efeecf47fb94257a0f32427ad5aae Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 2 Jul 2025 09:20:27 +0200 Subject: [PATCH 133/176] Fix: Remove some Typos as well as grammatical errors and oddities --- README.md | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 094412f..ad2dc86 100644 --- a/README.md +++ b/README.md @@ -44,14 +44,13 @@ is a CSAF Aggregator, to list or mirror providers. ## Use as go library -The modules of this repository can be used as library from other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. -But there is only limited support, and thus it is _not officially supported_. -There are plans to change this without concrete schedule, with a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). +The modules of this repository can be used as library by other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. +But there is only limited support and thus it is _not officially supported_. +There are plans to change this without a concrete schedule within a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). -Initially envisioned as toolbox, it was not constructed as a library, -and to name one issue, exposes to many functions. -This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change, -that we now have to live with. +Initially envisioned as a toolbox, it was not constructed as a library, +and to name one issue, exposes too many functions. +This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change. ### [examples](./examples/README.md) are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress. From 21ce19735bbeab67353ef97939b53a2fa5322903 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 2 Jul 2025 09:23:23 +0200 Subject: [PATCH 134/176] Fix: Fix typo and misleading meaning --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ad2dc86..897dfe0 100644 --- a/README.md +++ b/README.md @@ -113,7 +113,7 @@ For further details of the development process consult our [development page](./ > 1. Adjust your HTML links. > 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). > -> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off somtimes in 2025.) +> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off sometime in 2025.) 
## License From 3262e2ec2a746a78e1ee829455d37a09df009790 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 2 Jul 2025 15:33:37 +0200 Subject: [PATCH 135/176] Fix aggregator url base handling --- cmd/csaf_aggregator/mirror.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 1ef5881..f7b3100 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -71,6 +71,7 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { if err != nil { return nil, err } + base.Path = "" afp := csaf.NewAdvisoryFileProcessor( w.client, From 01c43d96ce47d34cfd981dd297de97b06113055e Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 2 Jul 2025 16:27:58 +0200 Subject: [PATCH 136/176] Fix checker url base handling --- cmd/csaf_checker/processor.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index def1960..2e0a424 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1374,6 +1374,7 @@ func (p *processor) checkSecurityFolder(folder string) string { if err != nil { return err.Error() } + base.Path = "" u = misc.JoinURL(base, up).String() p.checkTLS(u) From fc3837d655f3b4d08fcf4c61196fd4cfcfa501da Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 2 Jul 2025 17:06:25 +0200 Subject: [PATCH 137/176] Make json parsing more strict --- cmd/csaf_aggregator/interim.go | 4 ++-- cmd/csaf_aggregator/mirror.go | 5 ++--- cmd/csaf_checker/processor.go | 11 +++++----- cmd/csaf_downloader/downloader.go | 3 ++- cmd/csaf_uploader/processor.go | 7 +++---- cmd/csaf_validator/main.go | 4 ++-- csaf/advisory.go | 5 +++-- csaf/generate_cvss_enums.go | 5 +++-- csaf/models.go | 5 ++--- csaf/providermetaloader.go | 7 +++---- csaf/remotevalidation.go | 6 +++--- csaf/rolie.go | 8 ++++---- internal/misc/json.go | 34 +++++++++++++++++++++++++++++++ 13 files changed, 68 insertions(+), 36 deletions(-) create mode 100644 internal/misc/json.go diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index 94147bc..8805fdb 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -13,7 +13,6 @@ import ( "crypto/sha256" "crypto/sha512" "encoding/csv" - "encoding/json" "errors" "fmt" "io" @@ -25,6 +24,7 @@ import ( "time" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -81,7 +81,7 @@ func (w *worker) checkInterims( if err := func() error { defer res.Body.Close() tee := io.TeeReader(res.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) + return misc.StrictJSONParse(tee, &doc) }(); err != nil { return nil, err } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 1ef5881..f9ddcad 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -13,7 +13,6 @@ import ( "crypto/sha256" "crypto/sha512" "encoding/hex" - "encoding/json" "fmt" "io" "log/slog" @@ -31,6 +30,7 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -538,7 +538,7 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) download := func(r io.Reader) error { tee := io.TeeReader(r, hasher) - return json.NewDecoder(tee).Decode(&advisory) + return misc.StrictJSONParse(tee, &advisory) } if err := downloadJSON(w.client, file.URL(), download); err != nil { @@ -627,7 +627,6 @@ func (w *worker) 
mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) // If this fails it creates a signature itself with the configured key. func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error { sig, err := w.downloadSignature(url) - if err != nil { if err != errNotFound { w.log.Error("Could not find signature URL", "url", url, "err", err) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index def1960..08ec55e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -15,10 +15,8 @@ import ( "crypto/sha512" "crypto/tls" "encoding/csv" - "encoding/json" "errors" "fmt" - "github.com/gocsaf/csaf/v3/internal/misc" "io" "log" "net/http" @@ -30,6 +28,8 @@ import ( "strings" "time" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" @@ -518,7 +518,7 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { return nil, nil, fmt.Errorf("%s: %v", feed, err) } var rolieDoc any - err = json.NewDecoder(bytes.NewReader(all)).Decode(&rolieDoc) + err = misc.StrictJSONParse(bytes.NewReader(all), &rolieDoc) return rfeed, rolieDoc, err }() if err != nil { @@ -702,7 +702,7 @@ func (p *processor) integrity( if err := func() error { defer res.Body.Close() tee := io.TeeReader(res.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) + return misc.StrictJSONParse(tee, &doc) }(); err != nil { lg(ErrorType, "Reading %s failed: %v", u, err) continue @@ -1035,8 +1035,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = - append(times, t), + times, files = append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) p.timesChanges[path] = t } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 2b08544..4890593 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -35,6 +35,7 @@ import ( "golang.org/x/time/rate" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -551,7 +552,7 @@ func (dc *downloadContext) downloadAdvisory( tee := io.TeeReader(resp.Body, hasher) - if err := json.NewDecoder(tee).Decode(&doc); err != nil { + if err := misc.StrictJSONParse(tee, &doc); err != nil { dc.stats.downloadFailed++ slog.Warn("Downloading failed", "url", file.URL(), diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index f655e02..104e1ef 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -11,7 +11,6 @@ package main import ( "bytes" "crypto/tls" - "encoding/json" "errors" "fmt" "io" @@ -91,7 +90,7 @@ func (p *processor) create() error { Errors []string `json:"errors"` } - if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + if err := misc.StrictJSONParse(resp.Body, &result); err != nil { return err } @@ -115,7 +114,7 @@ func (p *processor) uploadRequest(filename string) (*http.Request, error) { if !p.cfg.NoSchemaCheck { var doc any - if err := json.NewDecoder(bytes.NewReader(data)).Decode(&doc); err != nil { + if err := misc.StrictJSONParse(bytes.NewReader(data), &doc); err != nil { return nil, err } errs, err := csaf.ValidateCSAF(doc) @@ -239,7 +238,7 @@ func (p *processor) process(filename string) error { Errors []string `json:"errors"` } - if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + if err := misc.StrictJSONParse(resp.Body, &result); err != nil { return err } diff 
--git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index b3a0855..8cf6d9a 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -10,7 +10,6 @@ package main import ( - "encoding/json" "fmt" "log" "os" @@ -19,6 +18,7 @@ import ( "github.com/jessevdk/go-flags" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -301,7 +301,7 @@ func loadJSONFromFile(fname string) (any, error) { } defer f.Close() var doc any - if err = json.NewDecoder(f).Decode(&doc); err != nil { + if err = misc.StrictJSONParse(f, &doc); err != nil { return nil, err } return doc, err diff --git a/csaf/advisory.go b/csaf/advisory.go index e81a28a..cc2516a 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -14,6 +14,8 @@ import ( "fmt" "io" "os" + + "github.com/gocsaf/csaf/v3/internal/misc" ) // Acknowledgement reflects the 'acknowledgement' object in the list of acknowledgements. @@ -383,7 +385,6 @@ type Relationship struct { FullProductName *FullProductName `json:"full_product_name"` // required ProductReference *ProductID `json:"product_reference"` // required RelatesToProductReference *ProductID `json:"relates_to_product_reference"` // required - } // Relationships is a list of Relationship. @@ -1391,7 +1392,7 @@ func LoadAdvisory(fname string) (*Advisory, error) { } defer f.Close() var advisory Advisory - if err := json.NewDecoder(f).Decode(&advisory); err != nil { + if err := misc.StrictJSONParse(f, &advisory); err != nil { return nil, err } if err := advisory.Validate(); err != nil { diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index c84ab15..2fa214b 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -12,7 +12,6 @@ package main import ( "bytes" - "encoding/json" "flag" "fmt" "go/format" @@ -22,6 +21,8 @@ import ( "sort" "strings" "text/template" + + "github.com/gocsaf/csaf/v3/internal/misc" ) // We from Intevation consider the source code parts in the following @@ -98,7 +99,7 @@ func loadSchema(filename string) (*schema, error) { } defer f.Close() var s schema - if err := json.NewDecoder(f).Decode(&s); err != nil { + if err := misc.StrictJSONParse(f, &s); err != nil { return nil, err } return &s, nil diff --git a/csaf/models.go b/csaf/models.go index c4b132d..983bf9c 100644 --- a/csaf/models.go +++ b/csaf/models.go @@ -17,6 +17,7 @@ import ( "strings" "time" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -575,7 +576,6 @@ func (d *Distribution) Validate() error { // Validate checks if the provider metadata is valid. // Returns an error if the validation fails otherwise nil. 
func (pmd *ProviderMetadata) Validate() error { - switch { case pmd.CanonicalURL == nil: return errors.New("canonical_url is mandatory") @@ -695,8 +695,7 @@ func (pmd *ProviderMetadata) WriteTo(w io.Writer) (int64, error) { func LoadProviderMetadata(r io.Reader) (*ProviderMetadata, error) { var pmd ProviderMetadata - dec := json.NewDecoder(r) - if err := dec.Decode(&pmd); err != nil { + if err := misc.StrictJSONParse(r, &pmd); err != nil { return nil, err } diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 72412b3..6f08eb7 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -11,13 +11,13 @@ package csaf import ( "bytes" "crypto/sha256" - "encoding/json" "fmt" "io" "log/slog" "net/http" "strings" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -33,7 +33,7 @@ type ProviderMetadataLoader struct { type ProviderMetadataLoadMessageType int const ( - //JSONDecodingFailed indicates problems with JSON decoding + // JSONDecodingFailed indicates problems with JSON decoding JSONDecodingFailed ProviderMetadataLoadMessageType = iota // SchemaValidationFailed indicates a general problem with schema validation. SchemaValidationFailed @@ -149,7 +149,6 @@ func (pmdl *ProviderMetadataLoader) Enumerate(domain string) []*LoadedProviderMe } dnsURL := "https://csaf.data.security." + domain return []*LoadedProviderMetadata{pmdl.loadFromURL(dnsURL)} - } // Load loads one valid provider metadata for a given path. @@ -323,7 +322,7 @@ func (pmdl *ProviderMetadataLoader) loadFromURL(path string) *LoadedProviderMeta var doc any - if err := json.NewDecoder(tee).Decode(&doc); err != nil { + if err := misc.StrictJSONParse(tee, &doc); err != nil { result.Messages.Add( JSONDecodingFailed, fmt.Sprintf("JSON decoding failed: %v", err)) diff --git a/csaf/remotevalidation.go b/csaf/remotevalidation.go index 9e99b6f..97d612e 100644 --- a/csaf/remotevalidation.go +++ b/csaf/remotevalidation.go @@ -18,6 +18,7 @@ import ( "net/http" "sync" + "github.com/gocsaf/csaf/v3/internal/misc" bolt "go.etcd.io/bbolt" ) @@ -180,7 +181,6 @@ func prepareCache(config string) (cache, error) { return create() } return nil - }); err != nil { db.Close() return nil, err @@ -256,7 +256,7 @@ func deserialize(value []byte) (*RemoteValidationResult, error) { } defer r.Close() var rvr RemoteValidationResult - if err := json.NewDecoder(r).Decode(&rvr); err != nil { + if err := misc.StrictJSONParse(r, &rvr); err != nil { return nil, err } return &rvr, nil @@ -323,7 +323,7 @@ func (v *remoteValidator) Validate(doc any) (*RemoteValidationResult, error) { // no cache -> process directly. in = resp.Body } - return json.NewDecoder(in).Decode(&rvr) + return misc.StrictJSONParse(in, &rvr) }(); err != nil { return nil, err } diff --git a/csaf/rolie.go b/csaf/rolie.go index b94cfa3..d3a5ac7 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -14,6 +14,7 @@ import ( "sort" "time" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -54,7 +55,7 @@ type ROLIEServiceDocument struct { // LoadROLIEServiceDocument loads a ROLIE service document from a reader. 
func LoadROLIEServiceDocument(r io.Reader) (*ROLIEServiceDocument, error) { var rsd ROLIEServiceDocument - if err := json.NewDecoder(r).Decode(&rsd); err != nil { + if err := misc.StrictJSONParse(r, &rsd); err != nil { return nil, err } return &rsd, nil @@ -122,7 +123,7 @@ func (rcd *ROLIECategoryDocument) Merge(categories ...string) bool { // LoadROLIECategoryDocument loads a ROLIE category document from a reader. func LoadROLIECategoryDocument(r io.Reader) (*ROLIECategoryDocument, error) { var rcd ROLIECategoryDocument - if err := json.NewDecoder(r).Decode(&rcd); err != nil { + if err := misc.StrictJSONParse(r, &rcd); err != nil { return nil, err } return &rcd, nil @@ -195,9 +196,8 @@ type ROLIEFeed struct { // LoadROLIEFeed loads a ROLIE feed from a reader. func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) { - dec := json.NewDecoder(r) var rf ROLIEFeed - if err := dec.Decode(&rf); err != nil { + if err := misc.StrictJSONParse(r, &rf); err != nil { return nil, err } return &rf, nil diff --git a/internal/misc/json.go b/internal/misc/json.go new file mode 100644 index 0000000..0bb2ec0 --- /dev/null +++ b/internal/misc/json.go @@ -0,0 +1,34 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package misc + +import ( + "encoding/json" + "fmt" + "io" +) + +// StrictJSONParse provides JSON parsing with stronger validation. +func StrictJSONParse(jsonData io.Reader, target interface{}) error { + decoder := json.NewDecoder(jsonData) + + decoder.DisallowUnknownFields() + + err := decoder.Decode(target) + if err != nil { + return fmt.Errorf("strictJSONParse: %w", err) + } + + token, err := decoder.Token() + if err != io.EOF { + return fmt.Errorf("strictJSONParse: unexpected trailing data after JSON: token: %v, err: %v", token, err) + } + + return nil +} From e7c08d05cd78ee31a2547acc6b8bfcd85d4aaf04 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 3 Jul 2025 10:58:32 +0200 Subject: [PATCH 138/176] Rewrite function from scratch --- internal/misc/json.go | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/internal/misc/json.go b/internal/misc/json.go index 0bb2ec0..c30323d 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -3,8 +3,8 @@ // // SPDX-License-Identifier: Apache-2.0 // -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-FileCopyrightText: 2025 German Federal Office for Information Security (BSI) +// Software-Engineering: 2025 Intevation GmbH package misc @@ -14,20 +14,23 @@ import ( "io" ) -// StrictJSONParse provides JSON parsing with stronger validation. 
+// StrictJSONParse creates a JSON decoder that decodes an interface +// while not allowing unknown fields nor trailing data func StrictJSONParse(jsonData io.Reader, target interface{}) error { decoder := json.NewDecoder(jsonData) - + // Don't allow unknown fields decoder.DisallowUnknownFields() - err := decoder.Decode(target) - if err != nil { - return fmt.Errorf("strictJSONParse: %w", err) + if err := decoder.Decode(target); err != nil { + return fmt.Errorf("JSON decoding error: %w", err) } - token, err := decoder.Token() - if err != io.EOF { - return fmt.Errorf("strictJSONParse: unexpected trailing data after JSON: token: %v, err: %v", token, err) + // Check for any trailing data after the main JSON structure + if _, err := decoder.Token(); err != io.EOF { + if err != nil { + return fmt.Errorf("error reading trailing data: %w", err) + } + return fmt.Errorf("unexpected trailing data after JSON object") } return nil From c81f55a752b33236d1b35f980baedaaaa04dea32 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 4 Jul 2025 15:29:03 +0200 Subject: [PATCH 139/176] Add LoadAdvisory tests --- csaf/advisory_test.go | 50 +++++ internal/misc/json.go | 2 +- .../avendor-advisory-0004.json | 171 ++++++++++++++++++ .../unknown-fields/avendor-advisory-0004.json | 171 ++++++++++++++++++ .../valid/avendor-advisory-0004.json | 170 +++++++++++++++++ 5 files changed, 563 insertions(+), 1 deletion(-) create mode 100644 csaf/advisory_test.go create mode 100644 testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json create mode 100644 testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json create mode 100644 testdata/csaf-documents/valid/avendor-advisory-0004.json diff --git a/csaf/advisory_test.go b/csaf/advisory_test.go new file mode 100644 index 0000000..062a713 --- /dev/null +++ b/csaf/advisory_test.go @@ -0,0 +1,50 @@ +package csaf + +import ( + "os" + "path/filepath" + "testing" +) + +func TestLoadAdvisory(t *testing.T) { + type args struct { + jsonDir string + } + tests := []struct { + name string + args args + wantErr bool + }{{ + name: "Valid documents", + args: args{jsonDir: "csaf-documents/valid"}, + wantErr: false, + }, + { + name: "Unknown fields", + args: args{jsonDir: "csaf-documents/unknown-fields"}, + wantErr: true, + }, + { + name: "Garbage trailing data", + args: args{jsonDir: "csaf-documents/trailing-garbage-data"}, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := filepath.Walk("../testdata/"+tt.args.jsonDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.Mode().IsRegular() && filepath.Ext(info.Name()) == ".json" { + if _, err := LoadAdvisory(path); (err != nil) != tt.wantErr { + t.Errorf("LoadAdvisory() error = %v, wantErr %v", err, tt.wantErr) + } + } + return nil + }); err != nil { + t.Fatal(err) + } + }) + } +} diff --git a/internal/misc/json.go b/internal/misc/json.go index c30323d..653c166 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -16,7 +16,7 @@ import ( // StrictJSONParse creates a JSON decoder that decodes an interface // while not allowing unknown fields nor trailing data -func StrictJSONParse(jsonData io.Reader, target interface{}) error { +func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) // Don't allow unknown fields decoder.DisallowUnknownFields() diff --git a/testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json 
b/testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json new file mode 100644 index 0000000..2131136 --- /dev/null +++ b/testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json @@ -0,0 +1,171 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} +invalid data diff --git a/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json b/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json new file mode 100644 index 0000000..17321ae --- /dev/null +++ b/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json @@ -0,0 +1,171 @@ +{ + "document": { + 
"unknown-field": false, + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/csaf-documents/valid/avendor-advisory-0004.json b/testdata/csaf-documents/valid/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/csaf-documents/valid/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF 
document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} From 7935818600ee70cbcb7784a67788a4f3bacaba01 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Mon, 7 Jul 2025 11:41:49 +0200 Subject: [PATCH 140/176] Fix: Allow unknown fields: They are not forbidden --- internal/misc/json.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/internal/misc/json.go b/internal/misc/json.go index 653c166..4ecc6a5 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -18,8 +18,6 @@ import ( // while not allowing unknown fields nor trailing data func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) - // Don't allow unknown fields - decoder.DisallowUnknownFields() if err := decoder.Decode(target); err != nil { return fmt.Errorf("JSON decoding error: %w", err) From 4b4d6ed5943c5bf0e953e22454a4da55302b5a15 Mon Sep 17 00:00:00 2001 From: 
JanHoefelmeyer Date: Mon, 7 Jul 2025 11:45:36 +0200 Subject: [PATCH 141/176] Remove uknown field tests --- csaf/advisory_test.go | 5 - .../unknown-fields/avendor-advisory-0004.json | 171 ------------------ 2 files changed, 176 deletions(-) delete mode 100644 testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json diff --git a/csaf/advisory_test.go b/csaf/advisory_test.go index 062a713..9a82884 100644 --- a/csaf/advisory_test.go +++ b/csaf/advisory_test.go @@ -19,11 +19,6 @@ func TestLoadAdvisory(t *testing.T) { args: args{jsonDir: "csaf-documents/valid"}, wantErr: false, }, - { - name: "Unknown fields", - args: args{jsonDir: "csaf-documents/unknown-fields"}, - wantErr: true, - }, { name: "Garbage trailing data", args: args{jsonDir: "csaf-documents/trailing-garbage-data"}, diff --git a/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json b/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json deleted file mode 100644 index 17321ae..0000000 --- a/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json +++ /dev/null @@ -1,171 +0,0 @@ -{ - "document": { - "unknown-field": false, - "category": "csaf_vex", - "csaf_version": "2.0", - "distribution": { - "tlp": { - "label": "WHITE", - "url": "https://www.first.org/tlp/v1/" - } - }, - "notes": [ - { - "category": "summary", - "title": "Test document summary", - "text": "Auto generated test CSAF document" - } - ], - "publisher": { - "category": "vendor", - "name": "ACME Inc.", - "namespace": "https://www.example.com" - }, - "title": "Test CSAF document", - "tracking": { - "current_release_date": "2020-01-01T00:00:00Z", - "generator": { - "date": "2020-01-01T00:00:00Z", - "engine": { - "name": "csaf-tool", - "version": "0.3.2" - } - }, - "id": "Avendor-advisory-0004", - "initial_release_date": "2020-01-01T00:00:00Z", - "revision_history": [ - { - "date": "2020-01-01T00:00:00Z", - "number": "1", - "summary": "Initial version" - } - ], - "status": "final", - "version": "1" - } - }, - "product_tree": { - "branches": [ - { - "category": "vendor", - "name": "AVendor", - "branches": [ - { - "category": "product_name", - "name": "product_1", - "branches": [ - { - "category": "product_version", - "name": "1.1", - "product": { - "name": "AVendor product_1 1.1", - "product_id": "CSAFPID_0001" - } - }, - { - "category": "product_version", - "name": "1.2", - "product": { - "name": "AVendor product_1 1.2", - "product_id": "CSAFPID_0002" - } - }, - { - "category": "product_version", - "name": "2.0", - "product": { - "name": "AVendor product_1 2.0", - "product_id": "CSAFPID_0003" - } - } - ] - } - ] - }, - { - "category": "vendor", - "name": "AVendor1", - "branches": [ - { - "category": "product_name", - "name": "product_2", - "branches": [ - { - "category": "product_version", - "name": "1", - "product": { - "name": "AVendor1 product_2 1", - "product_id": "CSAFPID_0004" - } - } - ] - } - ] - }, - { - "category": "vendor", - "name": "AVendor", - "branches": [ - { - "category": "product_name", - "name": "product_3", - "branches": [ - { - "category": "product_version", - "name": "2022H2", - "product": { - "name": "AVendor product_3 2022H2", - "product_id": "CSAFPID_0005" - } - } - ] - } - ] - } - ] - }, - "vulnerabilities": [ - { - "cve": "CVE-2020-1234", - "notes": [ - { - "category": "description", - "title": "CVE description", - "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" - } - ], - "product_status": { - "under_investigation": ["CSAFPID_0001"] - }, - "threats": [ - { - "category": "impact", - "details": 
"Customers should upgrade to the latest version of the product", - "date": "2020-01-01T00:00:00Z", - "product_ids": ["CSAFPID_0001"] - } - ] - }, - { - "cve": "CVE-2020-9876", - "notes": [ - { - "category": "description", - "title": "CVE description", - "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" - } - ], - "product_status": { - "under_investigation": ["CSAFPID_0001"] - }, - "threats": [ - { - "category": "impact", - "details": "Still under investigation", - "date": "2020-01-01T00:00:00Z", - "product_ids": ["CSAFPID_0001"] - } - ] - } - ] -} From 230e9f2d2ba50706ab90f1eb7739d00a42d9e335 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 31 Jul 2025 11:29:44 +0200 Subject: [PATCH 142/176] fix minor docs typo --- docs/scripts/Readme.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index 77e8dae..e0bc7c9 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -1,5 +1,5 @@ Scripts for assisting the Integration tests. -They were written on Ubuntu 20.04 TLS amd64 and also tested with 24.04 TLS. +They were written on Ubuntu 20.04 LTS amd64 and also tested with 24.04 LTS. - `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf integration tests on a naked Ubuntu LTS amd64. @@ -8,9 +8,9 @@ and configures nginx for serving TLS connections. - `TLSClientConfigsForITest.sh` generates client certificates by calling `createCCForITest.sh` which uses the root certificate initialized before with `createRootCAForITest.sh`. It configures nginx to enable the authentication with client certificate. (This assumes that the same folder name is used to create the root certificate) -- `setupProviderForITest.sh` builds the csaf_provider, writes the required nginx configurations and create the initial folders. IT calls `uploadToProvider.sh` to upload some csaf example files to the provider. +- `setupProviderForITest.sh` builds the `csaf_provider`, writes the required nginx configurations and create the initial folders. IT calls `uploadToProvider.sh` to upload some csaf example files to the provider. -As creating the folders needs to authenticate with the csaf_provider, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` +As creating the folders needs to authenticate with the `csaf_provider`, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` Calling example (as user with sudo privileges): ``` bash From 7f27a63e3c42d9647fe4cf6af56f2a9ca5316c7c Mon Sep 17 00:00:00 2001 From: Sebastian Wagner Date: Fri, 1 Aug 2025 11:42:52 +0200 Subject: [PATCH 143/176] docs provider-setup.md: Fix create URL in curl command --- docs/provider-setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 2fdf1e3..d54268f 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -144,7 +144,7 @@ on a GNU/Linux operating system. 
Create the folders: ```(shell) -curl https://192.168.56.102/cgi-bin/csaf_provider.go/create --cert-type p12 --cert {clientCertificat.p12} +curl https://192.168.56.102/cgi-bin/csaf_provider.go/api/create --cert-type p12 --cert {clientCertificat.p12} ``` Replace {clientCertificate.p12} with the client certificate file in pkcs12 format which includes the corresponding key as well. From 7fc5600521bd624c159d06f3e2a0d50c94390472 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 1 Aug 2025 10:55:10 +0200 Subject: [PATCH 144/176] Fix #669 Return error when the create request failed. --- cmd/csaf_uploader/processor.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index 104e1ef..b3e00ce 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -81,8 +81,9 @@ func (p *processor) create() error { } defer resp.Body.Close() + var createError error if resp.StatusCode != http.StatusOK { - log.Printf("Create failed: %s\n", resp.Status) + createError = fmt.Errorf("create failed: %s", resp.Status) } var result struct { @@ -100,7 +101,7 @@ func (p *processor) create() error { writeStrings("Errors:", result.Errors) - return nil + return createError } // uploadRequest creates the request for uploading a csaf document by passing the filename. From 100e4d395bc64c9ba9ddb658a11bc31d7eb6cf71 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 26 Aug 2025 11:49:38 +0200 Subject: [PATCH 145/176] Fix csaf checker listed check Correctly handle URLs that are absolute. --- cmd/csaf_checker/links.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index c7aec57..4eed5f9 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -9,11 +9,12 @@ package main import ( - "github.com/gocsaf/csaf/v3/internal/misc" "io" "net/http" "net/url" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/PuerkitoBio/goquery" "github.com/gocsaf/csaf/v3/util" @@ -94,7 +95,12 @@ func (pgs pages) listed( return err } // Links may be relative - abs := misc.JoinURL(baseURL, u).String() + var abs string + if u.IsAbs() { + abs = u.String() + } else { + abs = misc.JoinURL(baseURL, u).String() + } content.links.Add(abs) return nil }) From 108e5f8620a265571e237108ec2a0ae4f257d428 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 26 Aug 2025 15:24:51 +0200 Subject: [PATCH 146/176] improve docs/csaf_downloader.md (minor) time_range --- docs/csaf_downloader.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index d71b546..74c9e2c 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -104,8 +104,9 @@ ignorepattern = [".*white.*", ".*red.*"] #### Timerange option -The `timerange` parameter enables downloading advisories which last changes falls -into a given intervall. There are three possible notations: +The `time_range` parameter enables downloading advisories +which last changes falls into a given intervall. +There are three possible notations: 1. Relative. 
If the given string follows the rules of a [Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration), From 7ab964a3e3d5509f6dcb96e300b8e65b3aa71ff9 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Mon, 1 Sep 2025 11:48:56 +0200 Subject: [PATCH 147/176] Doc: Highlight the reason for the rate options existence (#662) * Doc: Highlight the reason for the rate options existence * Fix typos --- docs/csaf_aggregator.md | 6 ++++++ docs/csaf_checker.md | 7 +++++++ docs/csaf_downloader.md | 6 ++++++ 3 files changed, 19 insertions(+) diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 661871c..04efa3a 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -247,3 +247,9 @@ insecure = true In case you want to provide CSAF advisories from others that only qualify as CSAF publishers, see [how to use the `csaf_aggregator` as "CSAF proxy provider"](proxy-provider-for-aggregator.md). + +Some providers may limit the rate of requests that may be sent to retrieve advisories. +This may cause issues with the aggregator. +In this case, the --rate option can be used to adjust the requests per second +sent by each worker of the aggregator to an acceptable rate. +(The rate that is considered acceptable depends on the provider.) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 5152501..5c812bd 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -78,6 +78,13 @@ The option `timerange` allows to only check advisories from a given time interval. It can only be given once. See the [downloader documentation](csaf_downloader.md#timerange-option) for details. +Some providers may limit the rate of requests that may be sent to retrieve advisories. +This may cause the checker to be unable to retrieve all advisories. In this case, +the --rate option can be used to adjust the requests per second +sent by the checker to an acceptable rate. +(The rate that is considered acceptable depends on the provider.) + + You can ignore certain advisories while checking by specifying a list of regular expressions[^1] to match their URLs by using the `ignorepattern` option. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index d71b546..9168f3c 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -51,6 +51,12 @@ to download more advisories at once. This may improve the overall speed of the d However, since this also increases the load on the servers, their administrators could have taken countermeasures to limit this. +For example, some providers may limit the rate of requests that may be sent to retrieve advisories. +This may cause the downloader to be unable to retrieve all advisories. +In this case, the --rate option can be used to adjust the requests per second +sent by the downloader to an acceptable rate. +(The rate that is considered acceptable depends on the provider.) 
+ If no config file is explictly given the follwing places are searched for a config file: ``` From 1f1a2a4cbc4654942e83d8a8794303b4d17a557e Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 1 Sep 2025 12:04:17 +0200 Subject: [PATCH 148/176] Add arm64 builds for windows and linux (#663) --- Makefile | 47 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index f399bf5..8f164fc 100644 --- a/Makefile +++ b/Makefile @@ -12,15 +12,15 @@ SHELL = /bin/bash BUILD = go build MKDIR = mkdir -p -.PHONY: build build_linux build_win build_mac_amd64 build_mac_arm64 tag_checked_out mostlyclean +.PHONY: build build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 tag_checked_out mostlyclean all: - @echo choose a target from: build build_linux build_win build_mac_amd64 build_mac_arm64 mostlyclean + @echo choose a target from: build build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 mostlyclean @echo prepend \`make BUILDTAG=1\` to checkout the highest git tag before building @echo or set BUILDTAG to a specific tag # Build all binaries -build: build_linux build_win build_mac_amd64 build_mac_arm64 +build: build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 # if BUILDTAG == 1 set it to the highest git tag ifeq ($(strip $(BUILDTAG)),1) @@ -29,7 +29,7 @@ endif ifdef BUILDTAG # add the git tag checkout to the requirements of our build targets -build_linux build_win build_mac_amd64 build_mac_arm64: tag_checked_out +build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64: tag_checked_out endif tag_checked_out: @@ -69,31 +69,49 @@ LDFLAGS = -ldflags "-X github.com/gocsaf/csaf/v3/util.SemVersion=$(SEMVER)" # Build binaries and place them under bin-$(GOOS)-$(GOARCH) # Using 'Target-specific Variable Values' to specify the build target system -GOARCH = amd64 -build_linux: GOOS = linux -build_win: GOOS = windows -build_mac_amd64: GOOS = darwin +build_linux: GOOS=linux +build_linux: GOARCH=amd64 -build_mac_arm64: GOARCH = arm64 -build_mac_arm64: GOOS = darwin +build_win: GOOS=windows +build_win: GOARCH=amd64 -build_linux build_win build_mac_amd64 build_mac_arm64: +build_mac_amd64: GOOS=darwin +build_mac_amd64: GOARCH=amd64 + +build_mac_arm64: GOOS=darwin +build_mac_arm64: GOARCH=arm64 + +build_linux_arm64: GOOS=linux +build_linux_arm64: GOARCH=arm64 + +build_win_arm64: GOOS=windows +build_win_arm64: GOARCH=arm64 + +build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64: $(eval BINDIR = bin-$(GOOS)-$(GOARCH)/ ) $(MKDIR) $(BINDIR) env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/... 
DISTDIR := csaf-$(SEMVER) -dist: build_linux build_win build_mac_amd64 build_mac_arm64 +dist: build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 mkdir -p dist mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 + mkdir -p dist/$(DISTDIR)-windows-arm64/bin-windows-arm64 cp README.md dist/$(DISTDIR)-windows-amd64 + cp README.md dist/$(DISTDIR)-windows-arm64 cp bin-windows-amd64/csaf_uploader.exe bin-windows-amd64/csaf_validator.exe \ bin-windows-amd64/csaf_checker.exe bin-windows-amd64/csaf_downloader.exe \ dist/$(DISTDIR)-windows-amd64/bin-windows-amd64/ + cp bin-windows-arm64/csaf_uploader.exe bin-windows-arm64/csaf_validator.exe \ + bin-windows-arm64/csaf_checker.exe bin-windows-arm64/csaf_downloader.exe \ + dist/$(DISTDIR)-windows-arm64/bin-windows-arm64/ mkdir -p dist/$(DISTDIR)-windows-amd64/docs + mkdir -p dist/$(DISTDIR)-windows-arm64/docs cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \ docs/csaf_downloader.md dist/$(DISTDIR)-windows-amd64/docs + cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \ + docs/csaf_downloader.md dist/$(DISTDIR)-windows-arm64/docs mkdir -p dist/$(DISTDIR)-macos/bin-darwin-amd64 \ dist/$(DISTDIR)-macos/bin-darwin-arm64 \ dist/$(DISTDIR)-macos/docs @@ -103,15 +121,20 @@ dist: build_linux build_win build_mac_amd64 build_mac_arm64 cp docs/$${f}.md dist/$(DISTDIR)-macos/docs ; \ done mkdir dist/$(DISTDIR)-gnulinux-amd64 + mkdir dist/$(DISTDIR)-gnulinux-arm64 cp -r README.md bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 + cp -r README.md bin-linux-arm64 dist/$(DISTDIR)-gnulinux-arm64 # adjust which docs to copy mkdir -p dist/tmp_docs cp -r docs/examples dist/tmp_docs cp docs/*.md dist/tmp_docs cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-amd64/docs + cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-arm64/docs rm -rf dist/tmp_docs cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/ + cd dist/ ; zip -r $(DISTDIR)-windows-arm64.zip $(DISTDIR)-windows-arm64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-amd64.tar.gz $(DISTDIR)-gnulinux-amd64/ + cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-arm64.tar.gz $(DISTDIR)-gnulinux-arm64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-macos.tar.gz $(DISTDIR)-macos # Remove bin-*-* and dist directories From f6927154bf7517adcc6afef29e1244dbbc604647 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 1 Sep 2025 15:40:26 +0200 Subject: [PATCH 149/176] improve calculated version numbers (#651) for modified git workspaces a `-modified` is added to the semantic version in the makefile. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8f164fc..7bb8ef9 100644 --- a/Makefile +++ b/Makefile @@ -47,7 +47,7 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always 2>/dev/null || true) +GITDESC := $(shell git describe --tags --always --dirty=-modified 2>/dev/null || true) CURRENT_FOLDER_NAME := $(notdir $(CURDIR)) ifeq ($(strip $(GITDESC)),) SEMVER := $(CURRENT_FOLDER_NAME) From 1a2a8fae9c23cce626be07f7d8d6888823ae507c Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 1 Sep 2025 15:40:42 +0200 Subject: [PATCH 150/176] improve docs (minor) for csaf_provider (#668) * add a "both" to explain the config file option `certificate_and_password` better. 
--- docs/csaf_provider.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index cb27f9f..2fc5354 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -58,7 +58,8 @@ The following example file documents all available configuration options: # The following shows an example of a manually set prefix: #canonical_url_prefix = "https://localhost" -# Require users to use a password and a valid Client Certificate for write access. +# Require users to use both +# (1) a password and (2) a valid Client Certificate for write access. #certificate_and_password = false # Allow the user to send the request without having to send a passphrase From 187d1146311159fbb7f91d5019ee4d6eb479ff16 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 1 Sep 2025 16:13:57 +0200 Subject: [PATCH 151/176] Remove unnecessary URL joins (#676) This should avoid bugs for more complex scenarios. --- cmd/csaf_aggregator/mirror.go | 5 ++-- cmd/csaf_checker/processor.go | 47 ++++++++++++------------------- cmd/csaf_checker/roliecheck.go | 18 +++--------- cmd/csaf_downloader/downloader.go | 13 +++------ csaf/advisories.go | 28 +++++++++--------- 5 files changed, 41 insertions(+), 70 deletions(-) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index a013553..9653ea9 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -67,17 +67,16 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { // Collecting the categories per label. w.categories = map[string]util.Set[string]{} - base, err := url.Parse(w.loc) + pmdURL, err := url.Parse(w.loc) if err != nil { return nil, err } - base.Path = "" afp := csaf.NewAdvisoryFileProcessor( w.client, w.expr, w.metadataProvider, - base) + pmdURL) afp.AgeAccept = w.provider.ageAccept(w.processor.cfg) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 18ef49e..6e780ca 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -628,14 +628,9 @@ var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`) // mistakes, from conforming filenames to invalid advisories. func (p *processor) integrity( files []csaf.AdvisoryFile, - base string, mask whereType, lg func(MessageType, string, ...any), ) error { - b, err := url.Parse(base) - if err != nil { - return err - } client := p.httpClient() var data bytes.Buffer @@ -647,7 +642,7 @@ func (p *processor) integrity( continue } - u := misc.JoinURL(b, fp).String() + u := fp.String() // Should this URL be ignored? 
if p.cfg.ignoreURL(u) { @@ -779,7 +774,7 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", x.url(), err) continue } - hashFile := misc.JoinURL(b, hu).String() + hashFile := hu.String() p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { @@ -828,7 +823,7 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", f.SignURL(), err) continue } - sigFile := misc.JoinURL(b, su).String() + sigFile := su.String() p.checkTLS(sigFile) p.badSignatures.use() @@ -948,12 +943,13 @@ func (p *processor) checkIndex(base string, mask whereType) error { scanner := bufio.NewScanner(res.Body) for line := 1; scanner.Scan(); line++ { u := scanner.Text() - if _, err := url.Parse(u); err != nil { + up, err := url.Parse(u) + if err != nil { p.badIntegrities.error("index.txt contains invalid URL %q in line %d", u, line) continue } - files = append(files, csaf.DirectoryAdvisoryFile{Path: u}) + files = append(files, csaf.DirectoryAdvisoryFile{Path: misc.JoinURL(bu, up).String()}) } return files, scanner.Err() }() @@ -968,7 +964,7 @@ func (p *processor) checkIndex(base string, mask whereType) error { // Block rolie checks. p.labelChecker.feedLabel = "" - return p.integrity(files, base, mask, p.badIndices.add) + return p.integrity(files, mask, p.badIndices.add) } // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. @@ -1035,8 +1031,13 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] + pathURL, err := url.Parse(path) + if err != nil { + return nil, nil, err + } + times, files = append(times, t), - append(files, csaf.DirectoryAdvisoryFile{Path: path}) + append(files, csaf.DirectoryAdvisoryFile{Path: misc.JoinURL(bu, pathURL).String()}) p.timesChanges[path] = t } return times, files, nil @@ -1063,7 +1064,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { // Block rolie checks. p.labelChecker.feedLabel = "" - return p.integrity(files, base, mask, p.badChanges.add) + return p.integrity(files, mask, p.badChanges.add) } // empty checks if list of strings contains at least one none empty string. 
@@ -1364,18 +1365,11 @@ func (p *processor) checkSecurityFolder(folder string) string { } // Try to load - up, err := url.Parse(u) + _, err = url.Parse(u) if err != nil { return fmt.Sprintf("CSAF URL '%s' invalid: %v", u, err) } - base, err := url.Parse(folder) - if err != nil { - return err.Error() - } - base.Path = "" - - u = misc.JoinURL(base, up).String() p.checkTLS(u) if res, err = client.Get(u); err != nil { return fmt.Sprintf("Cannot fetch %s from security.txt: %v", u, err) @@ -1523,12 +1517,6 @@ func (p *processor) checkPGPKeys(_ string) error { client := p.httpClient() - base, err := url.Parse(p.pmdURL) - if err != nil { - return err - } - base.Path = "" - for i := range keys { key := &keys[i] if key.URL == nil { @@ -1541,10 +1529,11 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - u := misc.JoinURL(base, up).String() + // Todo: refactor all methods to directly accept *url.URL + u := up.String() p.checkTLS(u) - res, err := client.Get(u) + res, err := client.Get(*key.URL) if err != nil { p.badPGPs.error("Fetching public OpenPGP key %s failed: %v.", u, err) continue diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index ace4d0d..f510992 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -10,7 +10,6 @@ package main import ( "errors" - "github.com/gocsaf/csaf/v3/internal/misc" "net/http" "net/url" "sort" @@ -217,12 +216,6 @@ func defaults[T any](p *T, def T) T { // processROLIEFeeds goes through all ROLIE feeds and checks their // integrity and completeness. func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { - - base, err := url.Parse(p.pmdURL) - if err != nil { - return err - } - base.Path = "" p.badROLIEFeed.use() advisories := map[*csaf.Feed][]csaf.AdvisoryFile{} @@ -234,12 +227,11 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { if feed.URL == nil { continue } - up, err := url.Parse(string(*feed.URL)) + feedBase, err := url.Parse(string(*feed.URL)) if err != nil { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedBase := misc.JoinURL(base, up) feedURL := feedBase.String() p.checkTLS(feedURL) @@ -266,13 +258,12 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - up, err := url.Parse(string(*feed.URL)) + feedURL, err := url.Parse(string(*feed.URL)) if err != nil { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedURL := misc.JoinURL(base, up) feedBase, err := util.BaseURL(feedURL) if err != nil { p.badProviderMetadata.error("Bad base path: %v", err) @@ -292,7 +283,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { // TODO: Issue a warning if we want check AMBER+ without an // authorizing client. 
- if err := p.integrity(files, base.String(), rolieMask, p.badProviderMetadata.add); err != nil { + if err := p.integrity(files, rolieMask, p.badProviderMetadata.add); err != nil { if err != errContinue { return err } @@ -321,13 +312,12 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - up, err := url.Parse(string(*feed.URL)) + feedBase, err := url.Parse(string(*feed.URL)) if err != nil { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedBase := misc.JoinURL(base, up) makeAbs := makeAbsolute(feedBase) label := defaults(feed.TLPLabel, csaf.TLPLabelUnlabeled) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 4890593..4edd724 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -226,18 +226,16 @@ func (d *downloader) download(ctx context.Context, domain string) error { } } - base, err := url.Parse(lpmd.URL) + pmdURL, err := url.Parse(lpmd.URL) if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } - base.Path = "" expr := util.NewPathEval() if err := d.loadOpenPGPKeys( client, lpmd.Document, - base, expr, ); err != nil { return err @@ -247,7 +245,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { client, expr, lpmd.Document, - base) + pmdURL) // Do we need time range based filtering? if d.cfg.Range != nil { @@ -312,7 +310,6 @@ allFiles: func (d *downloader) loadOpenPGPKeys( client util.Client, doc any, - base *url.URL, expr *util.PathEval, ) error { src, err := expr.Eval("$.public_openpgp_keys", doc) @@ -337,7 +334,7 @@ func (d *downloader) loadOpenPGPKeys( if key.URL == nil { continue } - up, err := url.Parse(*key.URL) + u, err := url.Parse(*key.URL) if err != nil { slog.Warn("Invalid URL", "url", *key.URL, @@ -345,9 +342,7 @@ func (d *downloader) loadOpenPGPKeys( continue } - u := base.JoinPath(up.Path).String() - - res, err := client.Get(u) + res, err := client.Get(u.String()) if err != nil { slog.Warn( "Fetching public OpenPGP key failed", diff --git a/csaf/advisories.go b/csaf/advisories.go index c5e4fea..33dfa03 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -12,7 +12,6 @@ import ( "context" "encoding/csv" "fmt" - "github.com/gocsaf/csaf/v3/internal/misc" "io" "log/slog" "net/http" @@ -20,6 +19,7 @@ import ( "strings" "time" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -96,7 +96,7 @@ type AdvisoryFileProcessor struct { client util.Client expr *util.PathEval doc any - base *url.URL + pmdURL *url.URL } // NewAdvisoryFileProcessor constructs a filename extractor @@ -105,13 +105,13 @@ func NewAdvisoryFileProcessor( client util.Client, expr *util.PathEval, doc any, - base *url.URL, + pmdURL *url.URL, ) *AdvisoryFileProcessor { return &AdvisoryFileProcessor{ client: client, expr: expr, doc: doc, - base: base, + pmdURL: pmdURL, } } @@ -180,7 +180,7 @@ func (afp *AdvisoryFileProcessor) Process( // Not found -> fall back to PMD url if empty(dirURLs) { - baseURL, err := util.BaseURL(afp.base) + baseURL, err := util.BaseURL(afp.pmdURL) if err != nil { return err } @@ -262,8 +262,13 @@ func (afp *AdvisoryFileProcessor) loadChanges( continue } + pathURL, err := url.Parse(path) + if err != nil { + return nil, err + } + files = append(files, - DirectoryAdvisoryFile{Path: base.JoinPath(path).String()}) + DirectoryAdvisoryFile{Path: misc.JoinURL(base, pathURL).String()}) } return files, nil } @@ -277,12 +282,11 @@ func (afp *AdvisoryFileProcessor) 
processROLIE( if feed.URL == nil { continue } - up, err := url.Parse(string(*feed.URL)) + feedURL, err := url.Parse(string(*feed.URL)) if err != nil { slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } - feedURL := misc.JoinURL(afp.base, up) slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) @@ -290,12 +294,6 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid feed base URL", "url", fb, "err", err) continue } - feedBaseURL, err := url.Parse(fb) - if err != nil { - slog.Error("Cannot parse feed base URL", "url", fb, "err", err) - continue - } - feedBaseURL.Path = "" res, err := afp.client.Get(feedURL.String()) if err != nil { @@ -327,7 +325,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid URL", "url", u, "err", err) return "" } - return misc.JoinURL(feedBaseURL, p).String() + return p.String() } rfeed.Entries(func(entry *Entry) { From d1f33ab27dc55948822bdaa5b69c324863258f1f Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Mon, 8 Sep 2025 13:10:50 +0200 Subject: [PATCH 152/176] fix incorrect usage of formatted string output probably unchanged, but now `go vet` is happy that formatted strings are not misused --- cmd/csaf_checker/processor.go | 42 ++++++++++++++++---------------- internal/models/models_test.go | 8 +++--- internal/options/options_test.go | 6 ++--- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 6e780ca..e427b44 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -536,7 +536,7 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { if len(errors) > 0 { p.badProviderMetadata.error("%s: Validating against JSON schema failed:", feed) for _, msg := range errors { - p.badProviderMetadata.error(strings.ReplaceAll(msg, `%`, `%%`)) + p.badProviderMetadata.error("%s", strings.ReplaceAll(msg, `%`, `%%`)) } } @@ -736,7 +736,7 @@ func (p *processor) integrity( switch date, fault := p.extractTime(doc, `initial_release_date`, u); { case fault != "": - p.badFolders.error(fault) + p.badFolders.error("%s", fault) case folderYear == nil: p.badFolders.error("No year folder found in %s", u) case date.UTC().Year() != *folderYear: @@ -744,7 +744,7 @@ func (p *processor) integrity( } current, fault := p.extractTime(doc, `current_release_date`, u) if fault != "" { - p.badChanges.error(fault) + p.badChanges.error("%s", fault) } else { p.timesAdv[f.URL()] = current } @@ -814,7 +814,7 @@ func (p *processor) integrity( msgType = InfoType } for _, fetchError := range hashFetchErrors { - p.badIntegrities.add(msgType, fetchError) + p.badIntegrities.add(msgType, "%s", fetchError) } // Check signature @@ -1052,7 +1052,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { if p.cfg.Range != nil { filtered = " (maybe filtered out by time interval)" } - p.badChanges.warn("no entries in changes.csv found" + filtered) + p.badChanges.warn("%s", "no entries in changes.csv found"+filtered) } if !sort.SliceIsSorted(times, func(i, j int) bool { @@ -1300,8 +1300,8 @@ func (p *processor) checkProviderMetadata(domain string) bool { for i := range lpmd.Messages { p.badProviderMetadata.warn( - "Unexpected situation while loading provider-metadata.json: " + - lpmd.Messages[i].Message) + "Unexpected situation while loading provider-metadata.json: %s", + lpmd.Messages[i].Message) } if !lpmd.Valid() { @@ -1401,25 +1401,25 @@ func (p *processor) checkDNS(domain string) { res, 
err := client.Get(path) if err != nil { p.badDNSPath.add(ErrorType, - fmt.Sprintf("Fetching %s failed: %v", path, err)) + "Fetching %s failed: %v", path, err) return } if res.StatusCode != http.StatusOK { - p.badDNSPath.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status)) + p.badDNSPath.add(ErrorType, "Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status) } hash := sha256.New() defer res.Body.Close() content, err := io.ReadAll(res.Body) if err != nil { p.badDNSPath.add(ErrorType, - fmt.Sprintf("Error while reading the response from %s", path)) + "Error while reading the response from %s", path) } hash.Write(content) if !bytes.Equal(hash.Sum(nil), p.pmd256) { p.badDNSPath.add(ErrorType, - fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", - path)) + "%s does not serve the same provider-metadata.json as previously found", + path) } } @@ -1433,12 +1433,12 @@ func (p *processor) checkWellknown(domain string) { res, err := client.Get(path) if err != nil { p.badWellknownMetadata.add(ErrorType, - fmt.Sprintf("Fetching %s failed: %v", path, err)) + "Fetching %s failed: %v", path, err) return } if res.StatusCode != http.StatusOK { - p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status)) + p.badWellknownMetadata.add(ErrorType, "Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status) } } @@ -1475,13 +1475,13 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { // but found in the legacy location, and inform about finding it there (2). switch warnings { case 0: - p.badSecurity.add(InfoType, sDMessage) + p.badSecurity.add(InfoType, "%s", sDMessage) case 1: - p.badSecurity.add(ErrorType, sDMessage) - p.badSecurity.add(ErrorType, sLMessage) + p.badSecurity.add(ErrorType, "%s", sDMessage) + p.badSecurity.add(ErrorType, "%s", sLMessage) case 2: - p.badSecurity.add(WarnType, sDMessage) - p.badSecurity.add(InfoType, sLMessage) + p.badSecurity.add(WarnType, "%s", sDMessage) + p.badSecurity.add(InfoType, "%s", sLMessage) } p.checkDNS(domain) diff --git a/internal/models/models_test.go b/internal/models/models_test.go index 777a428..48cd02d 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -81,7 +81,7 @@ func TestUnmarshalText(t *testing.T) { byteSlice := []byte{'3', 'h'} var emptySlice []byte if testTimeRange.UnmarshalText(byteSlice) != nil { - t.Errorf(testTimeRange.UnmarshalText(byteSlice).Error()) + t.Error(testTimeRange.UnmarshalText(byteSlice).Error()) } if testTimeRange.UnmarshalText(emptySlice) == nil { t.Errorf("Failure: UnmarshalText succeeded on invalid slice of bytes.") @@ -104,10 +104,10 @@ func TestUnmarshalFlag(t *testing.T) { time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC), time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC)) if err := testTimeRange.UnmarshalFlag("3h"); err != nil { - t.Errorf(err.Error()) + t.Error(err.Error()) } if err := testTimeRange.UnmarshalFlag("2006-01-02T15:04:05"); err != nil { - t.Errorf(err.Error()) + t.Error(err.Error()) } if err := testTimeRange.UnmarshalFlag("2006-01-02T15:04:05a"); err == nil { t.Errorf("Failure: Extracted time from invalid string") @@ -119,7 +119,7 @@ func TestUnmarshalFlag(t *testing.T) { t.Errorf("Failure: Extracted time from invalid string") } if err := testTimeRange.UnmarshalFlag("2006-01-02T15:04:05, 2007-01-02T15:04:05"); err != nil { - t.Errorf(err.Error()) + 
t.Error(err.Error()) } } diff --git a/internal/options/options_test.go b/internal/options/options_test.go index 2768e37..6e96838 100644 --- a/internal/options/options_test.go +++ b/internal/options/options_test.go @@ -90,7 +90,7 @@ func TestParse(t *testing.T) { cmd.Env = append(os.Environ(), "TEST_HELP=1") err := cmd.Run() if err != nil { - t.Fatalf(err.Error()) + t.Fatal(err.Error()) } // test the version flag @@ -104,7 +104,7 @@ func TestParse(t *testing.T) { cmd.Env = append(os.Environ(), "TEST_VERSION=1") err = cmd.Run() if err != nil { - t.Fatalf(err.Error()) + t.Fatal(err.Error()) } } @@ -140,7 +140,7 @@ func TestLoadToml(t *testing.T) { t.Errorf("Failure: Succeeded in parsing nonexistant parameter") } if err := loadTOML(&cfg, "data/config.toml"); err != nil { - t.Errorf(err.Error()) + t.Error(err.Error()) } } From 5c1b0612551662490b90f11512e1c09fdec64447 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 12 Sep 2025 11:38:56 +0200 Subject: [PATCH 153/176] Rename workflow go_legacy to "Go Test (oldstable)" so it is distinct from the other "Go" workflow --- .github/workflows/go_legacy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/go_legacy.yml b/.github/workflows/go_legacy.yml index a86368d..fda6413 100644 --- a/.github/workflows/go_legacy.yml +++ b/.github/workflows/go_legacy.yml @@ -1,4 +1,4 @@ -name: Go +name: Go Test (oldstable) on: push: From bcb7c8be10c662216edc310854a2fa12631f00cb Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 12 Sep 2025 11:41:13 +0200 Subject: [PATCH 154/176] rename go_legacy.yml -> go-oldstable.yml --- .github/workflows/{go_legacy.yml => go-oldstable.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{go_legacy.yml => go-oldstable.yml} (100%) diff --git a/.github/workflows/go_legacy.yml b/.github/workflows/go-oldstable.yml similarity index 100% rename from .github/workflows/go_legacy.yml rename to .github/workflows/go-oldstable.yml From 0dbf822cbdcaa3737f12d21f82a3f1fb59fc085c Mon Sep 17 00:00:00 2001 From: mgoetzegb Date: Mon, 15 Sep 2025 12:42:30 +0200 Subject: [PATCH 155/176] fix doc comment: remove untrue claim of disallowing unknown fields (#677) adjust comment to fit https://github.com/gocsaf/csaf/pull/655/commits/7935818600ee70cbcb7784a67788a4f3bacaba01 --- internal/misc/json.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/misc/json.go b/internal/misc/json.go index 4ecc6a5..d9e87c3 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -15,7 +15,7 @@ import ( ) // StrictJSONParse creates a JSON decoder that decodes an interface -// while not allowing unknown fields nor trailing data +// while not allowing trailing data func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) From 05eae0a9ae6f945f946dd815ec7463da5ba0a7de Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Wed, 1 Oct 2025 11:14:09 +0200 Subject: [PATCH 156/176] Re-add unknown fields check (#681) --- internal/misc/json.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/internal/misc/json.go b/internal/misc/json.go index d9e87c3..2888302 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -18,6 +18,8 @@ import ( // while not allowing trailing data func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) + // Don't allow unknown fields + decoder.DisallowUnknownFields() if err := decoder.Decode(target); 
err != nil { return fmt.Errorf("JSON decoding error: %w", err) From c6bad42c24b6262ebc07fd48a2d622be9162088a Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 22 Oct 2025 16:57:00 +0200 Subject: [PATCH 157/176] Improve LoadCertificate unit test (#692) * fix `LoadCertificate` unit test replaced certificate with invalid dns name, which is rejected by stdlib of Go version >=1.25.2. Change in Go introduced by https://github.com/golang/go/issues/75715 * code review: add script to generate certificates, remove `greenbone` org entry * code review: add license header * rework cert creation and fix one filename --------- Co-authored-by: Marius Goetze --- internal/certs/certs_test.go | 10 +- internal/certs/createTestCerts.sh | 60 +++++ internal/certs/data/cert.crt | 61 +++-- internal/certs/data/private.pem | 42 ++++ internal/certs/data/privated.pem | 42 ---- internal/certs/data/testclient.crt | 49 ++-- internal/certs/data/testclientkey.pem | 308 +++++++++++++------------- 7 files changed, 311 insertions(+), 261 deletions(-) create mode 100755 internal/certs/createTestCerts.sh create mode 100644 internal/certs/data/private.pem delete mode 100644 internal/certs/data/privated.pem diff --git a/internal/certs/certs_test.go b/internal/certs/certs_test.go index e2f1af5..5bd7025 100644 --- a/internal/certs/certs_test.go +++ b/internal/certs/certs_test.go @@ -20,13 +20,13 @@ func TestLoadCertificates(t *testing.T) { passphrase = "qwer" missingCert = "data/testclientcert_missing.crt" missingTestkey = "data/testclientkey_missing.pem" - privateKey = "data/privated.pem" + privateKey = "data/private.pem" privateCert = "data/cert.crt" ) // Try to load cert that is not protected, expect success. if cert, err := LoadCertificate(&testCert, &testKey, nil); cert == nil || err != nil { - t.Errorf("Failure: Couldn't load supposedly valid certificate.") + t.Errorf("Failure: Couldn't load supposedly valid certificate. Got error: %v", err) } // Try to load no cert, expect error. if cert, err := LoadCertificate(nil, &testKey, nil); cert != nil || err == nil { @@ -46,7 +46,7 @@ func TestLoadCertificates(t *testing.T) { } // Try to load encrypted cert, expecting success. if cert, err := LoadCertificate(&privateCert, &privateKey, &passphrase); cert == nil || err != nil { - t.Errorf("Failure: Couldn't load supposedly valid encrypted certificate.") + t.Errorf("Failure: Couldn't load supposedly valid encrypted certificate. Got error: %v", err) } // Try to load wrong encrypted cert, expecting error. if cert, err := LoadCertificate(&testKey, &privateKey, &passphrase); cert != nil || err == nil { @@ -56,8 +56,8 @@ func TestLoadCertificates(t *testing.T) { if cert, err := LoadCertificate(&missingCert, &privateKey, &passphrase); cert != nil || err == nil { t.Errorf("Failure: No Failure while loading nonexistens certificate.") } - // Try to load nonexistent encrypted cert, expecting error. + // Try to load nonexistent encrypted cert, expecting success. if cert, err := LoadCertificate(nil, nil, nil); cert != nil || err != nil { - t.Errorf("Failure: Expected nil return.") + t.Errorf("Failure: Expected nil return. 
Got error: %v", err) } } diff --git a/internal/certs/createTestCerts.sh b/internal/certs/createTestCerts.sh new file mode 100755 index 0000000..084677e --- /dev/null +++ b/internal/certs/createTestCerts.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: 2025 German Federal Office for Information Security (BSI) +# Software-Engineering: 2025 Intevation GmbH + +# cab be used to generated the certificates for the go tests +# as the resulting files are in the repository, this script does not +# need to be run each time, its purpose is to document how the keys and +# certs were created + +set -e + +certtool --generate-privkey --outfile testserver-key.pem + + +echo ' +organization = "CSAF" +unit = "CSAF Distribution" +country = "DE" +cn = "csaf.test" + +dns_name = "csaf.test" +dns_name = "localhost" +dns_name = "*.csaf.test" +ip_address = "127.0.0.1" +ip_address = "::1" + +tls_www_server +tls_www_client +ocsp_signing_key +encryption_key +signing_key +expiration_days = 36500 +' > gnutls-certtool.testserver.template + +certtool --generate-self-signed --load-privkey testserver-key.pem --outfile cert.crt --template gnutls-certtool.testserver.template --stdout | head -1 + +# for testing legacy code path, we use openssl's traditional mode to +# create a password protected variant after RFC 1423 that still can be read +# by https://pkg.go.dev/crypto/x509#DecryptPEMBlock. Citation: +# Legacy PEM encryption as specified in RFC 1423 is insecure by design. +# Since it does not authenticate the ciphertext, it is vulnerable +# to padding oracle attacks that can let an attacker recover the plaintext. +openssl rsa -in testserver-key.pem -out private.pem -aes256 -passout pass:qwer -traditional + +echo ' +organization = "CSAF Tools Development (internal)" +country = "DE" +cn = "Tester" + +tls_www_client +encryption_key +signing_key + +expiration_days = 36500 +' > gnutls-certtool.testclientkey.template + +certtool --generate-privkey --bits 3072 --outfile testclientkey.pem +certtool --generate-self-signed --load-privkey testclientkey.pem --template gnutls-certtool.testclientkey.template --outfile testclient.crt diff --git a/internal/certs/data/cert.crt b/internal/certs/data/cert.crt index f80d61c..f814e3e 100644 --- a/internal/certs/data/cert.crt +++ b/internal/certs/data/cert.crt @@ -1,37 +1,28 @@ -----BEGIN CERTIFICATE----- -MIIGajCCBNKgAwIBAgIUGNi4GgCUssOOe3k0VuHf3R0+d54wDQYJKoZIhvcNAQEL -BQAwgY0xFDASBgNVBAMTC0NvbW1vbiBuYW1lMRMwEQYDVQQLEwppbnRldmF0aW9u -MRMwEQYDVQQKEwppbnRldmF0aW9uMRMwEQYDVQQHEwppbnRldmF0aW9uMRUwEwYD -VQQIEwxMb3dlciBTYXhvbnkxCzAJBgNVBAYTAkdFMRIwEAYKCZImiZPyLGQBGRYC -REMwHhcNMjMwOTE5MDcwMDA1WhcNMjYwNjE0MDcwMDA3WjCB8DEQMA4GA1UEAxMH -cmVxdWVzdDETMBEGA1UECxMKaW50ZXZhdGlvbjETMBEGA1UEChMKaW50ZXZhdGlv -bjETMBEGA1UEBxMKb3NuYWJydWVjazEVMBMGA1UECBMMbG93ZXIgc2F4b255MQsw -CQYDVQQGEwJHRTESMBAGCgmSJomT8ixkARkWAkRDMREwDwYKCZImiZPyLGQBGRYB -LjERMA8GCgmSJomT8ixkARkWAS4xETAPBgoJkiaJk/IsZAEZFgEuMRMwEQYKCZIm -iZPyLGQBGRYDd3d3MRcwFQYKCZImiZPyLGQBARMHbm8gaWRlYTCCAaIwDQYJKoZI -hvcNAQEBBQADggGPADCCAYoCggGBAN0vZbLXtRzd61rR8Hos0BGnqCaJXIwGARwx -JojMyxASFT+KeC4QDRkgRrK6OY4k/i7TEHuUGk/Bm754++554wmmhDqv1Q4+VhhR -1K/JAz/HVZNTAR1rPKwG82lyEpPxlRNZg/QtF9DqQSoSkL/fJLs+rq4zlKozXzRE -auZ5Be8So1dXRZfMVUMDgtk+IX8+iCeZisiWfv62ttQ0EiuiXLagd6ruEuoCSVi2 -tVswsC/Hp8AI2Ro56mmHiWthuae1H8yDWUFLSe9AQW65qC/xVUgo/nMpK2BYVFKb -70TMjl/dZM0Qn1tdiNyqCkbIhXjklZvZYhO+15TPkgDXDsqRUjpTrLZXLGrD6XIx -CRLZGY6YrUfsFTjUC6JrUrAR8zY7SLsYN5sUmFUSMpJnI+T/SD4p/0CXrKrbMOjW 
-Qqz6FX/WHPxvswGKHk5zHYGHrzx7OKmfVa6gzUgZSfOHj2xOOR2Un9DwNavIrmSC -WYXKZqig5qDyfzBvlXWEio/5GrDwgQIDAQABo4IBWzCCAVcwgcIGA1UdEQSBujCB -t4IrYSBkbnNOYW1lIG9mIHRoZSBzdWJqZWN0IG9mIHRoZSBjZXJ0aWZpY2F0ZYI3 -YW4gYWRkaXRpb25hbCBkbnNOYW1lIG9mIHRoZSBzdWJqZWN0IG9mIHRoZSBjZXJ0 -aWZpY2F0ZYIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIP -c2Vjb25kIGFkZGl0aW9ugg50aGlyZCBhZGRpdGlvboIHZG5zTmFtZTAMBgNVHRMB -Af8EAjAAMDEGA1UdJQQqMCgGCCsGAQUFBwMJBggrBgEFBQcDAgYIKwYBBQUHAwEG -CCsGAQUFBwMCMA8GA1UdDwEB/wQFAwMHsAAwHQYDVR0OBBYEFKrFhODjTKCopb+W -Qa29PsHR4HXgMB8GA1UdIwQYMBaAFCyZxCa1ZUHVy8LjikE8zumAiEgfMA0GCSqG -SIb3DQEBCwUAA4IBgQBTrAgh6d+qiLumEfmkNCmhewxKxDZp+Ni2nz9XRzNO2cQE -U0n8MdbnQInW3xJXng2sAcl1fQz0RN1hkyjDwi69mbbPgcTYmxJFvyt+zRYBe/Sq -4CGGkxEdPW94tMpQ6SrCn2mAMnvcq9A1pYBVYyPeUsaRHC5OUBFOMCevNy8JwNyY -MJ0H5HQCyCysbzA1d521pogGUs/tmbE+ym9zpV8vG0b6De1PexjVeGkTNYz6NCR2 -VZTQ+OJ5iE5pHPEC1Qif44LrR9Kdn/wu3RjTYyHeBOJFjK+DKgleNF4QVTcZQIPE -snN4H+/VSgTZQ3kgWbtpd1m5oRBJovEc2Qe+l+iDFCk8OA4z/x+fkvOeD3NUAl7D -9Pt3cP3UtWUJp4NJn2dvUljmQhB02HSqdNBhqKSg4/cf7l8Zo1ejvBUosrlgw3C3 -apDaC4/xk7woFKVYW25teH2ze+Gpz/YsLDtmL7Bri8CGVsqsN9yqO8SstwKBa3Rt -xQ2em6XnnanApT4iFX4= +MIIE2DCCA0CgAwIBAgIUT/9u6/HtTciy3NB6UGXu+U+UzT8wDQYJKoZIhvcNAQEL +BQAwTDELMAkGA1UEBhMCREUxDTALBgNVBAoTBENTQUYxGjAYBgNVBAsTEUNTQUYg +RGlzdHJpYnV0aW9uMRIwEAYDVQQDEwljc2FmLnRlc3QwIBcNMjUxMDE3MTAyMjM1 +WhgPMjEyNTA5MjMxMDIyMzVaMEwxCzAJBgNVBAYTAkRFMQ0wCwYDVQQKEwRDU0FG +MRowGAYDVQQLExFDU0FGIERpc3RyaWJ1dGlvbjESMBAGA1UEAxMJY3NhZi50ZXN0 +MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAwqJ45WlBG5CqW3Meewsf +Es1tqQRsHS/L6Hlz/aTZQHte/Co18qklnza0ZvK0mbPsQ8HLKXfU6Am5yw3u6vZj +XNfhWDW4QtsSk9f/y/fBADw17qYinoVyLpqZU5Z6kFRY5npY0C9bCtsAZd4qimx5 +yu/MhM8LHI9K2oKPSkFgRCTRKAo9sZ97o4wZmTxJIasOr0SPpmfMLs2sHSEqcK4d +/RxZ+OtYtd3pmE/WjxtSozCkdAccvrH+TSAuF3+/6oBiov8yX0KPNEBiiwuDXMUD +QWkjfcrxQZAswMWRo55JJYBbIjrinW8vldLooFo5trNEE2nukgRPhvLhiJdKKAeg ++A8jM/Bx7JgjRCPppIEmWdvXg+CS6L0hGj49pg3OcIiNNoufoXPRkFqmRh72n1Oj +2RC13W8H3C3SDYz20mqJhkbci+05vO/LgKj9te8xEs/xa4xCtv7ycuB2Etzf1cWS +zfz5LGXwwLI0rjpx3OAsr5i8Fukxe5maYLS9AUCTetTnAgMBAAGjga8wgawwDAYD +VR0TAQH/BAIwADAnBgNVHSUEIDAeBggrBgEFBQcDAgYIKwYBBQUHAwEGCCsGAQUF +BwMJMEQGA1UdEQQ9MDuCCWNzYWYudGVzdIIJbG9jYWxob3N0ggsqLmNzYWYudGVz +dIcEfwAAAYcQAAAAAAAAAAAAAAAAAAAAATAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0O +BBYEFN2InaQvsu6hULCYeKc6pdE4VgVHMA0GCSqGSIb3DQEBCwUAA4IBgQBjPdXd +2xHzce3mi4RlANT4nOSdpELhl54xeJDgI9Evt70N8B4uTmOI5+F6JVICE25cnDs1 +c9SoHpWzh1ZuzfiBYa/cdQNUtaTfgHLi5GYtV1DzmKXVRUciBiNBWWxYMbTGvTOO +i3r6DEgOYuukeL4qj//EGOcTJEarHVSxPMuXTD/PoP/VpIdqRS9drEpFUC6lecZc +UJtUPAcyx0oD2vNmPmulDfYFMLLOPrIeNa0g7os4wgUl7+9wR1cPPRTXY0fW6Hoi +j+a8Qn80Q3PrOuEO/SZ4aHHpOk90bRqofyIhFjPwS0YN5w/Sn23uq1u2Dx+Zy+5K +6Cs9p5dJWu5/zU4ZdbQlpYIHXQVbido1TY92Z84skEsac2wVh7L2LMB3p3Gu9WYn +oKqFYCw5FICvRgyh1KG8QWhW59Em0Jxr8rTw6qyBQACdixKy6/1ok2ArMivTC8Gd +rEbefshgc6dnAZCAp1MjCU+tg9iYEymSSLdOtUKvHEIosUGO1p5ol0hReTQ= -----END CERTIFICATE----- diff --git a/internal/certs/data/private.pem b/internal/certs/data/private.pem new file mode 100644 index 0000000..483283c --- /dev/null +++ b/internal/certs/data/private.pem @@ -0,0 +1,42 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-256-CBC,3ACC7169D177F0159193ACAF3B3997A3 + +DjxOUO2dbAAeHUtP2jSW/7zpVTWeRaJi5Kce74r1TB2DJ4FYI8361ZZcOrjISGQJ +33f1Ic+8gv3P5ORzGAIfxzSmwQLk5y45da7of2dj69FXba+WoGNKgMS/KMmj+CvR +XylNJl4RE5zovePkPvk2JDvyjg+POMMu3UTOoxJzSTmifV6F7msuFTHMHhs3edSs +PUAHprSW7Qh5dYq3VK8tuqg9qdy3uLajpZkg9b9bBfaiku+SiRfwsdCjeAuubiJK +ctyPQclE5B1jEgJit6odjzsLENB9uCzkgq61UPoxbT6URZ0jJwhEZgh15UAr74QP 
+KAElD8Q7V2Z0w31vPhBcMIyrSaNlMr5p4teNFlMEZRa0lhNOXp7AY0DwBtioX2bR +VCxFTk409L/gVaweUnS0jzY0cj/pU1L1I5OWScDjCRkkj0Vk40S/zcy5esz85b5r +rGRxdRKqJIIZeb3r7WdvINFnNXL/KL/hxVruZcZse8cV3Na+w4rH+AHElMd51tZ+ +RKEBDqH0jlg3aelfAWXkV96pUtH/4lTSZ1+huQyHLUjTULll7L6BtxNGzY071buS +0CaTFyRcaipKYkXQjmrA49uTWQzrEgqiRZ4exh/gAaM/tEgVRfo/49Xo5wrTsGr0 +4Q0hBnUYAa+cVL7K8z2WAk1qerb1CsmiyjQZFI1S6z10ugS6zTDdB/kwW5ZvAzWB +/DXc9rJlgTFLbZK7Oty/IDayYkWD3BjfOV94oMeogK0eworAMxhvfIFkPxRHwhIp +9KfBw7xsa2gJECbi8BvrsV69PHn6EHmphn7NMpc8A3KmBFv1uOqWu9P7ef67+e+U +JprzVt2mUDoTUayzVkwQPy3rm5wWxVanHqtRXig3RN3pnreEv1AdfTKLfCxE2jvo +9fh6hNo3urgIL1KFXHjiXVRt03RGfpWfAI3JKqhkWOqZ7rVT19AuJ6On2J1dVMkm +TFelKdX97YlvMfNdKp1pkzOjZ2f4ehL5WCkMq88VgDrTmZv+CfcnrRslsLP6MSpX +scAMFDdkzSBUH3NyHxxkstcs5xQm1SuPN/omB7rpYgfhD6HwdgZNEAINtMNgIIoR +tW34hGkV6BhI+2y+pkIndm63JVikrbuLKiwTjwynFJWKTWgRBMR/BvJ1Bq/IfJNo +pC/hIpN95vUbHGzHRfmO9v5HiaAaBYGs59gL6WS0OlsyFXMr6a9ZmBDbZ7TD94Ax +IAhGhRE+5OpF/kWLfOriXMEbyY/oNoN1y7jdpMdmncq2/26/OhL8RFUKPlCbz0LN +5FUv7ouW8kvUgy5tGu78iPu6MNI+BzqLg+TrUu2bufajS+/VGAFo/2PX896n+2FJ +cP2DXlmFgC6udIeWsGNJI8Y50fC+YZxN+UthLOctiOgM4pGK1UDl8JQLbt0xRrJA +MI5XkbXJJYBdjHaqg8WGF260UgWhlD9sdJc7ntLX9S+3DoOboSwmYu4Y8p15e4Cg +8LHgW4NmnBFPX5/oyYMVCt7SWEnnwGEeebu+YgD9fbFAsag5TpE04zpx58rCW5bh +sJqRBCcZE5rqO9CUF1fYu0F24fv+E3LK9lujCMARVfJk8CLUg7VFL9dY2XWEfHsO +plZ0lmc5BntBoQ5r+xK/6TbK5nn1Fo+JPRjnDaE++QdVx9ZVjtT+a/wCD5NJr13k +dByZ3eCz5+mZUBGD1PWh5C+iyL3Wpq29b3EsHfSIMzOZsCpY2jkC8Jr90ADxhZcH +j8wFXHIWCe+Nn89Zim53gvbzumspRj8Yb08RATruqpvwj3M/K5K6P92Lt3uqt6UB +W+tAcChHNNWHFIT5CtCV/rltJYe2c9k9yG6BZJeLWPYgq90dFkIqbdkiz/pVpmKS +WMMzvkaK+LEcv+M9eMUQPdPYWhwv67wAlUsdLVWyQtxoYcLPUY3Io+Smn8eE+Qz7 +bxkSX+59QB3eCXrNGKTFsBiNDlxl+9YH6U9XhwIGyHlnBgN/79ts0ZutIpOibIWg +WPc9Cp5nkjjQl/4y8RSea8KSlkmM9YeTEo8cEL57XXOr1OO8UEPn/Ogoo5TI7JXL +jGh4evOcfWbiXZbn9kGshq1Kmv+lhN5IZ8QJY0s5Ze1eURnu0zlqKvFe3PxDxHV7 ++PaM8MneRkT5B8QgC7prh/yJ0KEI2MyIcYP73fw8cOLTXenw0bpmKLLfxu8mSx4M +VEDqeZJUb/XwsZTd9VT+42p4YT/6wRAe9eU3zA9wKh4Sr96vUGTPktXcpxCjoBre +3IaF/6aeyRQn91Ps9XmOc0/KSxZmHMxWv2btVc4oLHawnyRlLXXT7OSG4FFR7eE8 +IRoCCSip6YnIflp1v2n1f/07SzfKtrtVdiW1u2lbBJtwuzN/h8TtwRJan5bKWV/6 +-----END RSA PRIVATE KEY----- diff --git a/internal/certs/data/privated.pem b/internal/certs/data/privated.pem deleted file mode 100644 index 354598a..0000000 --- a/internal/certs/data/privated.pem +++ /dev/null @@ -1,42 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-CBC,054A583F6C90570F - -tlGw8qlO25FaQdRLkai5L1JHWz/5fC4zd3qFISWssYH2FEnz8yfYsCoRLivVYhJB -fswOTj9h5b1RYRsWfIwCGfyNeOj8hkQrLwCW607vbhydGGJ4xc5RBF9MK0QCjSNT -r8myedNyfI4nm5enNVFDqYsqAc7cA3m1qw+QsAhPOrASDTp5svHR7g9+T6P5GDHm -B79nap02kfmodC7ytmWDBEclJ45Y19LOucN0+Nl6JgKkQEfWB/p2s2kGAGY1Of3X -/ERPOqeqZdFSdPDyX+mrzjGVhypgjBaz7XRh8OSeW8UP70rE+9aZKn9fIs2NyYMH -wwCElUmFV1Ye+/JtE4+Rcu6pG7NrX1rAC+pqPZaF8PT/kEuawiwrMuU0RP/8Y6mn -PRZZGZhXwBcfWPDN+JIj7e1NAXynwP/d4Pc4nb1O6EG3/Yip+F9NNaNbEfS4z9eV -Se7Gr/ySwxFhww9KhMtFYhkb6DVzy7StXpDqDmLhaF+qGCl86XRzZHho6EwQi+9r -c3VXbgogbjwIP8OgAKIZLuMxETZb0rvOr87sMAiqWRx+gRhryNniNr70anY8Vkpl -jcw6SJdqWuvOGaKjxWgdcHOzHdISEu/W6z8euTzMxX6/C7hBrKT8Edt71Jha26a5 -ZZNDH2XoqDphelfCbrARhw4P++KcnhPsY2da5cJ4021dfwXQGbGjcW1EAR3tCP/U -NKWc8Wm4dzuQSMqJERbWlXL8/UuvtyJR8VgNueg8EAHXCWBCS9i1i06gla9gPbdy -erhMDtUsJepFPDZVuqvm0dIjBaldl+74FHnPQ6+qFHXy6f71bGOmbonspnApqoeP -gc4zB65Nv+ws//XfdgwHhmtUkWS2ANPNQhU9o92l8XlqKicGC72dEEsR2TMS7fEW -K9/d06ZGu83FEXL43OXN79JmkpblonCWRgyVF7WPGufm+dtmR5zlIQruW2FJVwPZ -QmOioJYlSopOztyyBIuhZaNwVDQgoFtwHKRWAUseodzmHuPpvWCBjlL4hebJ7O0T 
-HGHGddqam3IPmyradhk0o1Qb54uk9rrzKWjcOEw850mJt3DnkHRNRgY96Gg0fA+m -+UxEOuGPvOudOMtC32vDKwAZ9eGgxAKea/kvaLFdPqwiq3B+IBetjSYGZ2kxVOAD -K8rHH6bnzrrasKHfOIBpw4MsiAG19sW1fFL61v5OXTcLOEQ/UVC8WinSj3JK894O -XjETyg8zvH+bYdlv9T2SGvAAzv1bJ3Iw9kb2VK0ZgwfwQgKpCDe6PEFLP7K2NNdF -zSw1GHOiDewsMD7VSfkmtevhzTOcQd/3uoyn/5ftcvcbqI4CGxP6kOxmul3NdfYl -insi95+IuhkSUQL02AdkI3SQhSnfmFRZSsy6JTXSN/7XOOzRFyMJcR1WlXOKFpt9 -G/bYGjVmfxtRqH4ZO7irCPiM+ZudXvPCl5VhZReBsJeEJcNuR36QTJIL3RQHyKTD -9Z12PegrgPXDgkSns1s8phTu+GygIEh67yLPbPYohYYbJUOkab7Il3JauihnuMSP -2BDDbwdvL1V7TQCmnopNb1srZj3q/1eWKmik2U1kvc78c3W03NC5wFETic2QCM9z -u/IaKAjO/kvSB8+ClSYaZDVLuBgUHf0DSG9cb5eoPqFt3t4zuWQhQjJR1YlLtQsJ -YSQFf0WqGj6sA2+AIy6Fv3oitlOPtRi/2seZ8ACSqxbwUFf3to8ZA3rJNoaYLvsT -sz++DrA8oHr4eDOiCoLeU6MLNiUvB6RGtjDwhQDh2LoJJyAdh9wB3vaAmEJ1u3o4 -cGyTCxbbkxRCWhMWW4NJbvdZORYhhhIu+TH5DaLgsZS1n+UF/amKQ0m8sj968Uo/ -w05QBNm/F3zg5dpzyW7uEfti8DaP/apDcf1dHSpk9ERkJ/QSIdgzGmrROQvh2tF/ -nvubXXMAex0tXFS6eyIZVgkT1S5eF001DsxIlp/jY6oFUYHquMcOQkyRAvUTvLO1 -pkexrPYrmx/alP71nNrBfixSTHMuPVb2jC38ElzllgxHfaaI5Q1hef4lVaErNaQ3 -m1hvE7dYkNomTt9fu/LHaxtw/P1eBlL44QcfqdqL67ROES+fB27d8vbajm1EQraw -QUoY+NM5KeQyKeRPWxDVQwAv02Lof/FSiB01yNqrzmRojtTykKB5VrnIA1DDP2vI -SoZjPZOSIJHh3qlDaKxlGOQD9Wp4OtIPLqxpBmRgGcq2AVtm57jRAF634nTGvB+N -7fvMpBay3EZy3sauM4MZk7bytJKK6huQjmER+GM/F/Wyw28L7rewK8ukPKx8Wybc -ljVLrduRPt97JH4WWejy+k5vv4LHWJLsGGU474YHGMXF2VE3kJ3JKj8Wm5gS6p/p ------END RSA PRIVATE KEY----- diff --git a/internal/certs/data/testclient.crt b/internal/certs/data/testclient.crt index f46f386..6cfd9fa 100644 --- a/internal/certs/data/testclient.crt +++ b/internal/certs/data/testclient.crt @@ -1,27 +1,26 @@ -----BEGIN CERTIFICATE----- -MIIEkDCCAvigAwIBAgIBFDANBgkqhkiG9w0BAQsFADBKMQ8wDQYDVQQDEwZUZXN0 -ZXIxKjAoBgNVBAoTIUNTQUYgVG9vbHMgRGV2ZWxvcG1lbnQgKGludGVybmFsKTEL -MAkGA1UEBhMCREUwHhcNMjMwOTA0MDcyMjAzWhcNMjMxMDI0MDcyMjAzWjBVMRow -GAYDVQQDExFUTFMgVGVzdCBDbGllbnQgMTEqMCgGA1UEChMhQ1NBRiBUb29scyBE -ZXZlbG9wbWVudCAoaW50ZXJuYWwpMQswCQYDVQQGEwJERTCCAaIwDQYJKoZIhvcN -AQEBBQADggGPADCCAYoCggGBAN0vZbLXtRzd61rR8Hos0BGnqCaJXIwGARwxJojM -yxASFT+KeC4QDRkgRrK6OY4k/i7TEHuUGk/Bm754++554wmmhDqv1Q4+VhhR1K/J -Az/HVZNTAR1rPKwG82lyEpPxlRNZg/QtF9DqQSoSkL/fJLs+rq4zlKozXzREauZ5 -Be8So1dXRZfMVUMDgtk+IX8+iCeZisiWfv62ttQ0EiuiXLagd6ruEuoCSVi2tVsw -sC/Hp8AI2Ro56mmHiWthuae1H8yDWUFLSe9AQW65qC/xVUgo/nMpK2BYVFKb70TM -jl/dZM0Qn1tdiNyqCkbIhXjklZvZYhO+15TPkgDXDsqRUjpTrLZXLGrD6XIxCRLZ -GY6YrUfsFTjUC6JrUrAR8zY7SLsYN5sUmFUSMpJnI+T/SD4p/0CXrKrbMOjWQqz6 -FX/WHPxvswGKHk5zHYGHrzx7OKmfVa6gzUgZSfOHj2xOOR2Un9DwNavIrmSCWYXK -Zqig5qDyfzBvlXWEio/5GrDwgQIDAQABo3YwdDAMBgNVHRMBAf8EAjAAMBMGA1Ud -JQQMMAoGCCsGAQUFBwMCMA8GA1UdDwEB/wQFAwMHoAAwHQYDVR0OBBYEFKrFhODj -TKCopb+WQa29PsHR4HXgMB8GA1UdIwQYMBaAFI6GhktAq9L2uRChC9LcXeedKiUg -MA0GCSqGSIb3DQEBCwUAA4IBgQAbUDaIkmubooDde7BpZQx742BsPg4IN68bIg9A -3jI9codx9c8l9ROvZ/7FeRNXzhYrQUwzcKpwtQ1mB7kM85oXaTLxrtnkZAO2fFSb -8RA6QjOrnOvewWaO3moCZaPnN1wWtlnUev2tD7D2Tz/f20dE2wbDV0BGb8bU4eGI -UVgzYrMh0MHaC8LKoXUWP97jp/p+9CG4D2S1CmpzP2Nm1dS03oj4UHIUtamjivYY -vOeoKATXmj59lgYqqoAVbTH6f4mZlZGmzUhRxK6hck7xBdiXAwfta72m4WzE7HRh -nHAgO5aVWb6zltvVDJhYumB9Itv+LI7uU8fF9Uyc65SZ2BevxgikoDNxTx0oNr+4 -hExQhJfKuPFF2NI1N2tPYJT53Cek/ZJfjX3TyBneqehthtRqoAIIEaF/QlXqzJIi -G66YFC3xFlLmaQh52DJkF2+hzcPhFTVQv3yCirGLUSS9Nm7vTO2wnnW5arZazSV+ -enRZb3oiVYFVDh0Hymz9g5VraMw= +MIIEeDCCAuCgAwIBAgIUTqTcNqmr8Ou/MpL1AUnM/3gcoUkwDQYJKoZIhvcNAQEL +BQAwSjELMAkGA1UEBhMCREUxKjAoBgNVBAoTIUNTQUYgVG9vbHMgRGV2ZWxvcG1l +bnQgKGludGVybmFsKTEPMA0GA1UEAxMGVGVzdGVyMCAXDTI1MTAxNzEwMjIzNloY +DzIxMjUwOTIzMTAyMjM2WjBKMQswCQYDVQQGEwJERTEqMCgGA1UEChMhQ1NBRiBU 
+b29scyBEZXZlbG9wbWVudCAoaW50ZXJuYWwpMQ8wDQYDVQQDEwZUZXN0ZXIwggGi +MA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBN4fIBbwuGJXjXoa6F7e4Zzin +Yd9EB4nt5TkNoMkRgQe0JIJ+t1/lS/xlI7ATxNjUdybnYwCrEfDvy8XGwN6te+Xh +dz6HKDWPijW+ritQW9kouxJJSpna95L8SqU4tjdfyL/2X9E/7j3VYw1//zcmhLJg +1Os0+JHPcPuj1vmwLa1v7eGTCNlt0K8DbrlhPlteJB3hWolNIoVDjRemZFmqwUeV +GZ/XJos7OTB07p08yCOFhLl9jXCgEDDkKmcnAil3YhjudlEGSjdzFLskVD4xrtQ5 +GsbdJHyHhcUdgh+vqX2bFSklwdwVil1qIUEHnxpcRMaluZQ4u1tCgNhKNQHrJzVQ +n1aRVAYdX1PxfoIb5wt0+25MiVw8y8EcrMH97Ss26eNAtLeHZNrY9alqx/Cs8gOi +I8wA2Nga138tZuCJRXsDOnom9RrtdPLajhSb7n33Iq8ZDhYVGEIm2pc5MJxaI53V +e2WhmemFPfYwUAtzdGgwrBoY9MechdtNLGZqHxECAwEAAaNUMFIwDAYDVR0TAQH/ +BAIwADATBgNVHSUEDDAKBggrBgEFBQcDAjAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0O +BBYEFBVaa/ovyPpbk/8nlmuISWB2/t8xMA0GCSqGSIb3DQEBCwUAA4IBgQC8EeDx +qipM7bAFxkAOmvhNAjodKXRCWKhatD8HryeINOPgWajzLlHj/PCnULulhaFO1viA ++iBBKbHb+7LImb/owlNVu8iYDh/xBXmLrOHyd12K8dyN471iTBrskQwSCnYd6e/p +4i0hhNj5JidOgA6swjt9j4X7/IgsvXexLIAhqgQDSsKQpPK17E9IB+d5p3UHU71w +Ob1mGIZ2j/GJnC6YmGFPqIZZ+cy3aVOypWf8RVZYPTFCz43ZuC70cP3kl2io75Rp +rWUNKXU+yUdBphHN6KJXUmlH4T9yqXKqnxK+9CnVC/CTlucF9VpktN7wfVxVPsrY +L79iys+FLPKrDkqcjpIJ2l/n/ugcUcXvN477qFCGbRY/3tB3Dmf4AvMPpTsStkXw +Ld+xAHog8upjVGsmXODX4sKjRMIFLIHbM01Iw0ECdKoKIMwjFGenwGmpBZA/Pfxe +AXBejd9KD0stCfHuKqx7Iu5N7Fg8BCLzmcSyoOmwJEo+Z3Z42IfSjOX8rQQ= -----END CERTIFICATE----- diff --git a/internal/certs/data/testclientkey.pem b/internal/certs/data/testclientkey.pem index ce2726e..4e8b564 100644 --- a/internal/certs/data/testclientkey.pem +++ b/internal/certs/data/testclientkey.pem @@ -3,180 +3,180 @@ Public Key Info: Key Security Level: High (3072 bits) modulus: - 00:dd:2f:65:b2:d7:b5:1c:dd:eb:5a:d1:f0:7a:2c:d0 - 11:a7:a8:26:89:5c:8c:06:01:1c:31:26:88:cc:cb:10 - 12:15:3f:8a:78:2e:10:0d:19:20:46:b2:ba:39:8e:24 - fe:2e:d3:10:7b:94:1a:4f:c1:9b:be:78:fb:ee:79:e3 - 09:a6:84:3a:af:d5:0e:3e:56:18:51:d4:af:c9:03:3f - c7:55:93:53:01:1d:6b:3c:ac:06:f3:69:72:12:93:f1 - 95:13:59:83:f4:2d:17:d0:ea:41:2a:12:90:bf:df:24 - bb:3e:ae:ae:33:94:aa:33:5f:34:44:6a:e6:79:05:ef - 12:a3:57:57:45:97:cc:55:43:03:82:d9:3e:21:7f:3e - 88:27:99:8a:c8:96:7e:fe:b6:b6:d4:34:12:2b:a2:5c - b6:a0:77:aa:ee:12:ea:02:49:58:b6:b5:5b:30:b0:2f - c7:a7:c0:08:d9:1a:39:ea:69:87:89:6b:61:b9:a7:b5 - 1f:cc:83:59:41:4b:49:ef:40:41:6e:b9:a8:2f:f1:55 - 48:28:fe:73:29:2b:60:58:54:52:9b:ef:44:cc:8e:5f - dd:64:cd:10:9f:5b:5d:88:dc:aa:0a:46:c8:85:78:e4 - 95:9b:d9:62:13:be:d7:94:cf:92:00:d7:0e:ca:91:52 - 3a:53:ac:b6:57:2c:6a:c3:e9:72:31:09:12:d9:19:8e - 98:ad:47:ec:15:38:d4:0b:a2:6b:52:b0:11:f3:36:3b - 48:bb:18:37:9b:14:98:55:12:32:92:67:23:e4:ff:48 - 3e:29:ff:40:97:ac:aa:db:30:e8:d6:42:ac:fa:15:7f - d6:1c:fc:6f:b3:01:8a:1e:4e:73:1d:81:87:af:3c:7b - 38:a9:9f:55:ae:a0:cd:48:19:49:f3:87:8f:6c:4e:39 - 1d:94:9f:d0:f0:35:ab:c8:ae:64:82:59:85:ca:66:a8 - a0:e6:a0:f2:7f:30:6f:95:75:84:8a:8f:f9:1a:b0:f0 - 81: + 00:c1:37:87:c8:05:bc:2e:18:95:e3:5e:86:ba:17:b7 + b8:67:38:a7:61:df:44:07:89:ed:e5:39:0d:a0:c9:11 + 81:07:b4:24:82:7e:b7:5f:e5:4b:fc:65:23:b0:13:c4 + d8:d4:77:26:e7:63:00:ab:11:f0:ef:cb:c5:c6:c0:de + ad:7b:e5:e1:77:3e:87:28:35:8f:8a:35:be:ae:2b:50 + 5b:d9:28:bb:12:49:4a:99:da:f7:92:fc:4a:a5:38:b6 + 37:5f:c8:bf:f6:5f:d1:3f:ee:3d:d5:63:0d:7f:ff:37 + 26:84:b2:60:d4:eb:34:f8:91:cf:70:fb:a3:d6:f9:b0 + 2d:ad:6f:ed:e1:93:08:d9:6d:d0:af:03:6e:b9:61:3e + 5b:5e:24:1d:e1:5a:89:4d:22:85:43:8d:17:a6:64:59 + aa:c1:47:95:19:9f:d7:26:8b:3b:39:30:74:ee:9d:3c + c8:23:85:84:b9:7d:8d:70:a0:10:30:e4:2a:67:27:02 + 29:77:62:18:ee:76:51:06:4a:37:73:14:bb:24:54:3e + 31:ae:d4:39:1a:c6:dd:24:7c:87:85:c5:1d:82:1f:af + 
a9:7d:9b:15:29:25:c1:dc:15:8a:5d:6a:21:41:07:9f + 1a:5c:44:c6:a5:b9:94:38:bb:5b:42:80:d8:4a:35:01 + eb:27:35:50:9f:56:91:54:06:1d:5f:53:f1:7e:82:1b + e7:0b:74:fb:6e:4c:89:5c:3c:cb:c1:1c:ac:c1:fd:ed + 2b:36:e9:e3:40:b4:b7:87:64:da:d8:f5:a9:6a:c7:f0 + ac:f2:03:a2:23:cc:00:d8:d8:1a:d7:7f:2d:66:e0:89 + 45:7b:03:3a:7a:26:f5:1a:ed:74:f2:da:8e:14:9b:ee + 7d:f7:22:af:19:0e:16:15:18:42:26:da:97:39:30:9c + 5a:23:9d:d5:7b:65:a1:99:e9:85:3d:f6:30:50:0b:73 + 74:68:30:ac:1a:18:f4:c7:9c:85:db:4d:2c:66:6a:1f + 11: public exponent: 01:00:01: private exponent: - 14:ff:c0:f9:ff:bc:b4:26:e5:87:53:d3:2e:e6:3e:42 - ce:d6:0a:02:94:84:be:b5:30:46:02:50:8e:90:e0:cf - b6:b0:b7:a6:bd:48:cc:d5:8b:d8:ea:72:ff:af:dd:17 - 3c:be:d1:1b:ca:6d:cd:10:a6:86:a8:d9:d2:44:44:27 - d0:65:51:65:0c:27:34:07:dc:7b:38:64:10:03:7c:f4 - a1:cd:40:de:24:3a:e0:21:bc:ef:33:1d:9f:61:e8:57 - ac:e4:9c:c0:7b:df:7c:f8:20:83:ac:0b:8e:0b:d3:62 - eb:8a:8e:03:5b:a3:e5:08:ae:df:a7:fe:85:92:e8:a5 - ae:58:46:72:d6:fc:91:43:b1:7b:a4:c0:5f:51:c3:50 - 0d:e2:67:e8:af:51:13:41:a9:8d:ef:fb:a1:a4:e2:84 - 7c:2b:a0:50:c5:fe:ed:84:a5:25:83:86:4a:d3:0f:56 - 37:38:e6:1e:26:7d:45:22:0b:ba:22:35:be:f8:8b:1b - 72:90:13:c4:1f:c5:d1:34:b5:0e:b2:ee:f7:e1:b9:5e - a2:29:8d:f9:6e:23:4b:50:8f:35:c8:a9:f3:d2:1f:dd - ce:a0:96:50:2d:2e:af:cf:b5:e1:20:e7:e9:d2:49:ed - b5:0e:5b:3e:d1:4b:f1:fa:c2:73:3a:1b:51:34:7e:75 - 30:06:d2:47:d2:a8:2a:45:be:16:fb:8f:63:84:85:b7 - bf:f7:c4:c5:3d:95:56:8c:d1:02:7f:58:ac:4d:11:7b - c5:55:f3:c8:4e:d7:d9:aa:62:b0:e3:1e:04:5c:97:d1 - ca:e2:71:aa:8b:33:b4:34:e9:04:d4:70:7c:f4:cb:57 - 19:c1:03:23:f4:bc:4d:91:8f:b2:9a:99:1c:6c:81:2d - 4d:2d:e9:a1:e3:ce:e3:c9:62:52:89:1f:47:86:61:f1 - dd:bc:46:8d:79:0a:99:9d:aa:4b:a9:0a:72:54:db:dc - ae:48:be:60:4a:73:99:d8:3c:9e:07:78:05:df:87:39 + 70:0e:fd:af:d3:2b:ad:6c:52:d9:f8:43:99:00:12:6c + 5f:69:2b:22:87:33:54:4f:f9:69:fc:e9:db:7b:61:ac + 7c:c4:4c:7c:66:73:81:a9:61:a5:73:1e:fc:8a:aa:9a + ba:b6:94:18:94:81:99:b5:a1:0f:e2:15:c5:4c:ac:98 + df:07:96:f8:ea:89:c6:97:31:b5:8d:b0:16:21:46:cc + ce:28:62:3e:9b:c5:29:70:26:2f:d8:24:8e:a8:52:7d + d1:0e:83:ce:a7:09:9b:d3:57:87:3f:98:5f:c8:ab:ba + aa:31:2e:19:ae:84:1d:39:ab:9e:b2:42:f6:75:ff:68 + ae:73:00:fa:d7:a4:c5:3d:7c:4f:54:65:4e:1c:88:e6 + c2:b5:9d:a2:ca:38:61:45:09:17:01:68:5a:f7:4e:4d + cb:24:f1:e3:57:a1:97:58:1e:b3:ef:57:91:e0:1d:95 + 51:8c:a9:4a:4e:f7:cd:fe:f7:04:f3:ff:67:ad:e7:01 + 14:dc:7e:e4:00:c0:38:51:2f:04:db:39:6c:f1:1b:a4 + a5:f1:b4:5a:c3:17:d2:41:1a:5a:b5:f3:69:3b:b8:ba + 7b:59:96:d7:b2:c2:2c:9a:dd:e9:42:ce:fb:c8:22:fc + c5:33:97:6d:68:89:cd:e5:bc:2e:cc:9d:23:65:18:04 + 0c:83:b6:35:7e:16:09:96:d1:48:61:31:b1:ce:f8:50 + f0:14:ba:57:2f:02:1b:61:9c:bc:81:c1:ef:b3:bf:2f + fb:36:af:18:8c:90:40:55:5a:fd:a7:d4:ed:3b:94:a6 + df:ab:eb:6c:d2:bc:e3:80:7e:d5:06:21:28:9b:04:65 + b5:cc:04:b2:44:e9:2d:3b:7d:de:24:90:8d:fb:90:2d + 40:17:51:cf:a7:fa:ee:54:89:8f:c0:f4:e4:c2:bd:44 + 94:1d:8d:fc:b7:d7:05:4d:46:dc:63:1f:7f:d8:b4:8b + 11:db:37:be:4d:e9:2b:33:b9:6b:8c:a7:f0:43:56:c5 prime1: - 00:e9:63:0f:d7:49:31:27:a8:36:fe:95:bd:8d:05:c1 - 35:48:2e:03:4f:a6:57:54:3a:a4:95:3f:8e:9f:28:7c - d2:df:af:54:36:9e:7c:9f:c3:b9:64:8f:c0:b0:96:3c - aa:01:f6:9a:be:83:e2:85:20:0d:33:de:88:97:af:6f - be:3f:53:5a:a3:77:02:fd:81:17:91:3b:b2:2d:ab:78 - db:d9:43:db:04:69:82:61:30:e4:96:ac:88:8b:f6:3f - 56:c4:49:fd:d5:e5:8c:9d:30:ad:cf:d9:8d:5c:87:b5 - 27:4b:09:8e:19:ed:e2:11:3f:69:b2:47:be:70:39:11 - 41:a3:db:bb:b9:0e:e4:7b:50:d0:d2:c2:89:81:36:b9 - 6b:a6:fe:94:5b:06:66:e6:ed:86:52:42:5e:a9:0e:18 - db:18:f9:14:21:3d:e0:3c:8d:79:c3:f5:d2:cc:51:65 - 
fb:1c:49:ed:0a:d5:33:99:34:16:f9:1d:68:4a:78:da - 5f: + 00:f0:57:25:fd:aa:7e:98:13:08:28:99:16:eb:af:2e + 22:f6:e6:d7:bd:df:49:57:17:71:bf:21:ba:bf:75:54 + 5a:38:92:64:8c:4a:10:d4:4f:77:18:44:c2:79:f0:9d + 72:26:2e:9a:27:5d:e7:41:0b:c6:65:cb:fa:89:6d:9b + fb:87:78:e2:87:22:d4:92:21:f5:3a:57:fa:b0:bf:bb + 66:a2:bf:43:af:e8:58:b4:e2:a1:ed:97:62:09:0d:49 + ca:4c:99:a2:f4:f3:31:df:80:8e:56:be:64:9d:72:59 + ef:e9:db:4d:a3:e2:cf:79:1e:99:89:b2:f1:e3:2d:bc + 8f:a0:2a:2f:a6:f0:21:18:2d:f1:57:20:55:c1:c9:18 + c1:64:c6:9c:00:df:b2:54:55:8d:fe:d3:46:a0:5c:2e + f8:f7:10:b6:27:3a:4a:79:a1:14:b1:0c:c3:72:5b:2b + 66:d6:85:2c:7e:58:72:eb:33:62:73:34:e5:38:87:2e + 17: prime2: - 00:f2:9d:ae:5f:bd:b7:a3:87:a7:8d:30:46:06:8b:15 - a9:e5:a9:58:1c:2b:3a:7e:78:35:36:56:31:42:df:46 - 87:e8:57:0d:6e:99:de:cf:fb:a8:72:16:71:4b:b3:ad - ed:74:07:cb:cf:7d:2b:12:89:66:c4:0f:8a:ea:e3:37 - 17:2c:75:92:11:7a:a6:da:29:24:33:9b:69:c2:64:68 - 03:db:31:de:fe:1d:a2:4d:9d:91:9f:f0:50:b8:8f:d0 - 22:11:b9:b0:95:98:5e:65:bf:45:97:9b:35:f2:98:27 - 46:7c:b2:86:eb:7b:8b:57:f2:c3:49:47:7d:01:4a:9a - b0:e6:67:05:e5:61:7a:ab:63:c8:cb:d8:44:69:88:72 - a5:a9:60:89:60:df:e6:d9:4d:16:2b:35:7b:20:00:f3 - 3c:d1:78:f9:22:eb:48:c3:7f:78:63:e6:34:60:48:30 - 66:02:bb:38:c2:94:2e:b9:86:b2:2f:9a:4f:17:7f:e1 - 1f: + 00:cd:ce:5d:fb:04:16:34:f4:de:02:7d:00:07:3e:b0 + 94:8c:f4:3a:62:05:37:1a:4f:d8:40:2e:31:11:07:77 + 09:8b:bd:76:6e:85:b9:43:df:3f:86:cb:db:6d:fe:c6 + 4c:ca:e1:16:ce:5c:0e:e1:b1:10:0d:8d:48:99:d7:43 + 7f:6c:b6:20:b2:cd:0c:56:26:02:18:81:e1:67:e5:cd + b3:66:1e:77:dc:49:6a:5d:8c:9c:0e:24:14:3e:a1:4a + 7e:cf:72:e6:e4:03:e6:38:41:fa:2b:91:71:6c:33:b0 + ec:07:3a:be:5b:f8:74:f5:e4:1f:9c:c4:d0:d4:75:a8 + 35:09:05:0f:7f:54:4e:2a:bc:cc:92:de:1e:f4:74:8a + 56:36:e0:b1:37:cf:b3:9c:57:05:76:59:69:c3:03:de + c2:33:0c:c4:a1:4f:2a:b8:3c:20:63:c9:58:96:1a:e2 + 62:ce:bf:fb:a9:51:b0:66:99:35:d6:d2:60:59:72:bd + 17: coefficient: - 00:93:3e:7c:b9:ea:87:52:37:fa:d5:0a:36:fb:e1:d0 - fc:62:4d:00:0b:ad:a8:fb:bd:34:53:96:c2:6c:a1:6a - 49:b7:a0:24:33:16:95:79:14:ac:bb:75:8d:78:e9:10 - fa:be:44:60:58:94:4a:9c:ba:64:1d:86:27:8b:7f:51 - 4d:80:b0:ff:7a:91:c0:4d:a4:aa:d1:f1:79:7d:8f:71 - 49:12:73:d4:44:5f:0c:2e:55:a6:d9:13:b8:3b:e5:dc - e1:14:98:7e:eb:5b:60:ad:d7:4b:da:c0:d8:3f:bf:70 - 92:53:8c:31:6a:8b:61:5e:a3:7d:ff:84:2c:7d:ed:9f - 74:29:9a:e7:14:fb:c3:ab:8e:9f:60:6a:98:ab:86:0b - ea:fb:ff:20:2f:3b:a7:76:03:3a:55:bb:b2:c6:9c:b5 - 66:36:b8:1c:7f:9b:b6:62:89:ff:6a:d6:35:58:0b:f0 - 55:27:01:f0:67:8d:88:3f:74:48:3d:bf:8c:fc:05:62 - 47: + 33:6a:05:3e:1e:46:46:58:e2:61:38:6a:c2:8f:77:a2 + 27:b7:19:38:75:40:d6:8c:87:bc:65:a6:24:c3:97:e5 + ef:70:1b:2c:4e:9c:08:ca:1d:eb:97:11:74:14:bb:99 + de:22:a1:6e:bc:6c:c6:25:98:8a:8e:17:f4:f9:4d:a3 + 1d:01:5e:26:0e:b4:e8:1c:aa:06:7c:66:b1:89:5a:b4 + 82:65:d1:bf:20:cb:b2:57:a8:af:7f:00:07:00:7c:5e + d4:09:60:0c:0a:6e:a8:e1:16:1b:04:95:b1:bc:2b:35 + ad:80:78:0a:0a:1d:5f:c9:cc:24:3a:5e:20:03:50:44 + b8:b0:f3:f1:17:ff:41:b8:5d:56:9b:1c:f1:e6:2b:c6 + ba:a2:8c:18:25:8c:d5:90:f1:28:66:29:bb:40:3d:b2 + f9:65:99:2e:b7:1b:e3:d0:d2:1a:d7:96:70:cc:f6:74 + c5:2e:bf:f5:c9:60:c0:ff:38:f8:a8:db:1a:7d:6a:4e + exp1: - 00:99:16:2d:91:dd:a4:ac:8a:9e:68:27:f8:89:c4:38 - 93:a6:a0:e7:f3:1a:fd:35:76:b1:f6:64:16:3d:37:e5 - 88:bc:c8:d8:c8:6a:f4:fc:26:fa:38:88:42:b0:92:1b - 80:b8:80:f5:c7:f9:e2:5f:c8:42:60:bf:9b:81:43:c6 - 5c:58:55:68:a2:c8:b1:e1:6f:07:f2:6f:e1:d4:2b:21 - bf:b3:a7:da:c5:ee:1f:63:79:1a:b7:ea:bc:36:72:73 - e1:8a:27:ae:a4:db:49:7c:e2:2d:60:a5:27:20:86:b3 - c0:ee:6b:7a:16:6f:ff:55:a8:ee:bf:ce:67:90:5d:1e - 
80:9b:e6:ca:1f:fd:30:c9:e2:9c:d7:62:5b:a7:b2:29 - b5:ff:78:06:00:1f:16:e8:6a:ed:2c:8f:f4:5f:97:ab - 9e:2b:a7:56:18:e7:e9:6a:4e:b2:8c:63:76:be:26:b6 - 6a:1c:88:31:40:65:d0:ce:b1:68:50:47:85:dd:33:a0 - a9: + 5c:1b:49:f7:f9:0b:23:04:c8:2f:a6:db:dd:de:f8:f3 + 75:63:ea:72:5d:cc:21:90:5e:8b:3d:45:f0:71:ea:ad + d8:d8:61:a8:52:0a:39:13:6b:34:e5:c5:12:2e:60:68 + 8a:b1:79:6a:74:d6:57:5b:47:e1:63:56:d4:ac:29:07 + 30:57:e7:98:9a:84:94:ac:66:ea:c1:24:d5:ef:e4:c5 + e4:c1:20:13:9e:1b:c0:d6:c9:ef:e0:00:36:2f:dd:83 + a5:ef:8b:40:0c:a3:a4:60:04:2c:c2:32:95:14:69:db + 43:e8:43:cc:f6:f3:44:1b:b2:03:cf:8c:5b:df:ff:4f + 9b:b6:0f:25:0f:09:df:d6:5b:93:64:54:f9:3b:34:3d + 89:7d:83:f3:e1:c6:da:03:1f:b3:f5:0c:30:10:a3:ff + cd:cf:9d:bf:52:db:8f:d9:67:b0:a2:8f:94:97:d3:fe + 49:60:28:39:13:74:97:26:ce:28:10:b1:78:04:76:69 + exp2: - 00:8d:b1:5f:7c:94:ed:62:39:40:b6:a9:a1:cc:02:80 - c5:77:d6:9e:19:dd:79:4d:11:61:6a:79:8e:4d:92:de - bb:53:0b:3c:52:02:d5:69:3c:7d:95:1b:dc:51:2d:00 - 00:35:0a:b4:92:5a:74:c4:5f:b0:c0:02:9f:cc:2c:a5 - 29:08:93:25:9a:c5:ba:1a:a1:7a:7e:15:5e:ff:e3:ea - 07:8e:85:a2:c9:60:7f:40:bb:2c:a8:6f:0e:85:ab:a0 - 0f:b5:b0:70:1b:fe:1f:eb:66:78:fb:60:ef:71:de:40 - d9:de:cb:d9:16:40:52:12:2c:3a:b7:5a:63:fc:54:18 - e2:05:bd:d7:68:ae:b4:98:d2:2f:1c:36:13:46:5b:25 - 31:f1:28:eb:32:c3:b1:2b:e9:e4:6f:99:cd:6d:d4:80 - 3a:5d:d0:3c:18:93:b7:2c:4e:0e:fe:b1:1c:97:ba:b1 - 61:72:68:eb:6e:60:62:a5:81:b0:21:33:0a:cc:1b:a8 - 5b: + 6e:6d:c5:d5:b3:8a:aa:dd:9c:e6:5e:e6:0d:fd:20:48 + 85:1d:62:da:47:8c:1a:8d:2f:2e:b8:da:51:15:dd:54 + 7c:eb:ab:49:80:6d:39:32:e7:e6:4f:2a:2d:6a:20:43 + 02:35:26:c4:91:76:d6:b8:e8:31:2d:57:00:5d:15:f5 + a0:82:55:27:3b:88:dc:0c:c6:e1:19:87:b5:f5:03:9b + b8:36:ae:ff:bf:50:d8:63:63:34:df:3d:11:a1:ff:d3 + ed:41:ed:0b:f9:df:a4:de:19:fb:18:ae:70:6d:88:08 + 0d:95:02:a1:5c:be:7d:55:eb:74:75:d2:cb:bd:5a:05 + 23:12:d9:0e:ec:50:88:f4:07:1c:e3:1c:5e:f4:cd:69 + 97:46:97:30:a8:3c:ea:ad:72:db:de:fc:35:cc:b4:d1 + 25:0d:3b:d0:86:27:18:f6:02:37:28:c9:64:b9:86:31 + 98:58:41:13:c8:26:4b:d6:f7:a1:8d:fe:6e:e0:76:ff + Public Key PIN: - pin-sha256:iFdBnKP/7hZCLdj7qqTtdNPFjpZGka259fSYvv3X02U= + pin-sha256:Zv2mSFRUYM7ofg5obMJJxhZpnuvO7gkCOlqfDK1gzks= Public Key ID: - sha256:8857419ca3ffee16422dd8fbaaa4ed74d3c58e964691adb9f5f498befdd7d365 - sha1:aac584e0e34ca0a8a5bf9641adbd3ec1d1e075e0 + sha256:66fda648545460cee87e0e686cc249c616699eebceee09023a5a9f0cad60ce4b + sha1:155a6bfa2fc8fa5b93ff27966b88496076fedf31 -----BEGIN RSA PRIVATE KEY----- -MIIG5QIBAAKCAYEA3S9lste1HN3rWtHweizQEaeoJolcjAYBHDEmiMzLEBIVP4p4 -LhANGSBGsro5jiT+LtMQe5QaT8Gbvnj77nnjCaaEOq/VDj5WGFHUr8kDP8dVk1MB -HWs8rAbzaXISk/GVE1mD9C0X0OpBKhKQv98kuz6urjOUqjNfNERq5nkF7xKjV1dF -l8xVQwOC2T4hfz6IJ5mKyJZ+/ra21DQSK6JctqB3qu4S6gJJWLa1WzCwL8enwAjZ -GjnqaYeJa2G5p7UfzINZQUtJ70BBbrmoL/FVSCj+cykrYFhUUpvvRMyOX91kzRCf -W12I3KoKRsiFeOSVm9liE77XlM+SANcOypFSOlOstlcsasPpcjEJEtkZjpitR+wV -ONQLomtSsBHzNjtIuxg3mxSYVRIykmcj5P9IPin/QJesqtsw6NZCrPoVf9Yc/G+z -AYoeTnMdgYevPHs4qZ9VrqDNSBlJ84ePbE45HZSf0PA1q8iuZIJZhcpmqKDmoPJ/ -MG+VdYSKj/kasPCBAgMBAAECggGAFP/A+f+8tCblh1PTLuY+Qs7WCgKUhL61MEYC -UI6Q4M+2sLemvUjM1YvY6nL/r90XPL7RG8ptzRCmhqjZ0kREJ9BlUWUMJzQH3Hs4 -ZBADfPShzUDeJDrgIbzvMx2fYehXrOScwHvffPggg6wLjgvTYuuKjgNbo+UIrt+n -/oWS6KWuWEZy1vyRQ7F7pMBfUcNQDeJn6K9RE0Gpje/7oaTihHwroFDF/u2EpSWD -hkrTD1Y3OOYeJn1FIgu6IjW++IsbcpATxB/F0TS1DrLu9+G5XqIpjfluI0tQjzXI -qfPSH93OoJZQLS6vz7XhIOfp0knttQ5bPtFL8frCczobUTR+dTAG0kfSqCpFvhb7 -j2OEhbe/98TFPZVWjNECf1isTRF7xVXzyE7X2apisOMeBFyX0cricaqLM7Q06QTU -cHz0y1cZwQMj9LxNkY+ympkcbIEtTS3poePO48liUokfR4Zh8d28Ro15Cpmdqkup 
-CnJU29yuSL5gSnOZ2DyeB3gF34c5AoHBAOljD9dJMSeoNv6VvY0FwTVILgNPpldU -OqSVP46fKHzS369UNp58n8O5ZI/AsJY8qgH2mr6D4oUgDTPeiJevb74/U1qjdwL9 -gReRO7Itq3jb2UPbBGmCYTDklqyIi/Y/VsRJ/dXljJ0wrc/ZjVyHtSdLCY4Z7eIR -P2myR75wORFBo9u7uQ7ke1DQ0sKJgTa5a6b+lFsGZubthlJCXqkOGNsY+RQhPeA8 -jXnD9dLMUWX7HEntCtUzmTQW+R1oSnjaXwKBwQDyna5fvbejh6eNMEYGixWp5alY -HCs6fng1NlYxQt9Gh+hXDW6Z3s/7qHIWcUuzre10B8vPfSsSiWbED4rq4zcXLHWS -EXqm2ikkM5tpwmRoA9sx3v4dok2dkZ/wULiP0CIRubCVmF5lv0WXmzXymCdGfLKG -63uLV/LDSUd9AUqasOZnBeVheqtjyMvYRGmIcqWpYIlg3+bZTRYrNXsgAPM80Xj5 -IutIw394Y+Y0YEgwZgK7OMKULrmGsi+aTxd/4R8CgcEAmRYtkd2krIqeaCf4icQ4 -k6ag5/Ma/TV2sfZkFj035Yi8yNjIavT8Jvo4iEKwkhuAuID1x/niX8hCYL+bgUPG -XFhVaKLIseFvB/Jv4dQrIb+zp9rF7h9jeRq36rw2cnPhiieupNtJfOItYKUnIIaz -wO5rehZv/1Wo7r/OZ5BdHoCb5sof/TDJ4pzXYlunsim1/3gGAB8W6GrtLI/0X5er -niunVhjn6WpOsoxjdr4mtmociDFAZdDOsWhQR4XdM6CpAoHBAI2xX3yU7WI5QLap -ocwCgMV31p4Z3XlNEWFqeY5Nkt67Uws8UgLVaTx9lRvcUS0AADUKtJJadMRfsMAC -n8wspSkIkyWaxboaoXp+FV7/4+oHjoWiyWB/QLssqG8OhaugD7WwcBv+H+tmePtg -73HeQNney9kWQFISLDq3WmP8VBjiBb3XaK60mNIvHDYTRlslMfEo6zLDsSvp5G+Z -zW3UgDpd0DwYk7csTg7+sRyXurFhcmjrbmBipYGwITMKzBuoWwKBwQCTPny56odS -N/rVCjb74dD8Yk0AC62o+700U5bCbKFqSbegJDMWlXkUrLt1jXjpEPq+RGBYlEqc -umQdhieLf1FNgLD/epHATaSq0fF5fY9xSRJz1ERfDC5VptkTuDvl3OEUmH7rW2Ct -10vawNg/v3CSU4wxaothXqN9/4Qsfe2fdCma5xT7w6uOn2BqmKuGC+r7/yAvO6d2 -AzpVu7LGnLVmNrgcf5u2Yon/atY1WAvwVScB8GeNiD90SD2/jPwFYkc= +MIIG4gIBAAKCAYEAwTeHyAW8LhiV416Guhe3uGc4p2HfRAeJ7eU5DaDJEYEHtCSC +frdf5Uv8ZSOwE8TY1Hcm52MAqxHw78vFxsDerXvl4Xc+hyg1j4o1vq4rUFvZKLsS +SUqZ2veS/EqlOLY3X8i/9l/RP+491WMNf/83JoSyYNTrNPiRz3D7o9b5sC2tb+3h +kwjZbdCvA265YT5bXiQd4VqJTSKFQ40XpmRZqsFHlRmf1yaLOzkwdO6dPMgjhYS5 +fY1woBAw5CpnJwIpd2IY7nZRBko3cxS7JFQ+Ma7UORrG3SR8h4XFHYIfr6l9mxUp +JcHcFYpdaiFBB58aXETGpbmUOLtbQoDYSjUB6yc1UJ9WkVQGHV9T8X6CG+cLdPtu +TIlcPMvBHKzB/e0rNunjQLS3h2Ta2PWpasfwrPIDoiPMANjYGtd/LWbgiUV7Azp6 +JvUa7XTy2o4Um+599yKvGQ4WFRhCJtqXOTCcWiOd1XtloZnphT32MFALc3RoMKwa +GPTHnIXbTSxmah8RAgMBAAECggGAcA79r9MrrWxS2fhDmQASbF9pKyKHM1RP+Wn8 +6dt7Yax8xEx8ZnOBqWGlcx78iqqauraUGJSBmbWhD+IVxUysmN8HlvjqicaXMbWN +sBYhRszOKGI+m8UpcCYv2CSOqFJ90Q6DzqcJm9NXhz+YX8iruqoxLhmuhB05q56y +QvZ1/2iucwD616TFPXxPVGVOHIjmwrWdoso4YUUJFwFoWvdOTcsk8eNXoZdYHrPv +V5HgHZVRjKlKTvfN/vcE8/9nrecBFNx+5ADAOFEvBNs5bPEbpKXxtFrDF9JBGlq1 +82k7uLp7WZbXssIsmt3pQs77yCL8xTOXbWiJzeW8LsydI2UYBAyDtjV+FgmW0Uhh +MbHO+FDwFLpXLwIbYZy8gcHvs78v+zavGIyQQFVa/afU7TuUpt+r62zSvOOAftUG +ISibBGW1zASyROktO33eJJCN+5AtQBdRz6f67lSJj8D05MK9RJQdjfy31wVNRtxj +H3/YtIsR2ze+TekrM7lrjKfwQ1bFAoHBAPBXJf2qfpgTCCiZFuuvLiL25te930lX +F3G/Ibq/dVRaOJJkjEoQ1E93GETCefCdciYumidd50ELxmXL+oltm/uHeOKHItSS +IfU6V/qwv7tmor9Dr+hYtOKh7ZdiCQ1JykyZovTzMd+Ajla+ZJ1yWe/p202j4s95 +HpmJsvHjLbyPoCovpvAhGC3xVyBVwckYwWTGnADfslRVjf7TRqBcLvj3ELYnOkp5 +oRSxDMNyWytm1oUsflhy6zNiczTlOIcuFwKBwQDNzl37BBY09N4CfQAHPrCUjPQ6 +YgU3Gk/YQC4xEQd3CYu9dm6FuUPfP4bL223+xkzK4RbOXA7hsRANjUiZ10N/bLYg +ss0MViYCGIHhZ+XNs2Yed9xJal2MnA4kFD6hSn7PcubkA+Y4QforkXFsM7DsBzq+ +W/h09eQfnMTQ1HWoNQkFD39UTiq8zJLeHvR0ilY24LE3z7OcVwV2WWnDA97CMwzE +oU8quDwgY8lYlhriYs6/+6lRsGaZNdbSYFlyvRcCgcBcG0n3+QsjBMgvptvd3vjz +dWPqcl3MIZBeiz1F8HHqrdjYYahSCjkTazTlxRIuYGiKsXlqdNZXW0fhY1bUrCkH +MFfnmJqElKxm6sEk1e/kxeTBIBOeG8DWye/gADYv3YOl74tADKOkYAQswjKVFGnb +Q+hDzPbzRBuyA8+MW9//T5u2DyUPCd/WW5NkVPk7ND2JfYPz4cbaAx+z9QwwEKP/ +zc+dv1Lbj9lnsKKPlJfT/klgKDkTdJcmzigQsXgEdmkCgcBubcXVs4qq3ZzmXuYN +/SBIhR1i2keMGo0vLrjaURXdVHzrq0mAbTky5+ZPKi1qIEMCNSbEkXbWuOgxLVcA +XRX1oIJVJzuI3AzG4RmHtfUDm7g2rv+/UNhjYzTfPRGh/9PtQe0L+d+k3hn7GK5w +bYgIDZUCoVy+fVXrdHXSy71aBSMS2Q7sUIj0BxzjHF70zWmXRpcwqDzqrXLb3vw1 
+zLTRJQ070IYnGPYCNyjJZLmGMZhYQRPIJkvW96GN/m7gdv8CgcAzagU+HkZGWOJh +OGrCj3eiJ7cZOHVA1oyHvGWmJMOX5e9wGyxOnAjKHeuXEXQUu5neIqFuvGzGJZiK +jhf0+U2jHQFeJg606ByqBnxmsYlatIJl0b8gy7JXqK9/AAcAfF7UCWAMCm6o4RYb +BJWxvCs1rYB4CgodX8nMJDpeIANQRLiw8/EX/0G4XVabHPHmK8a6oowYJYzVkPEo +Zim7QD2y+WWZLrcb49DSGteWcMz2dMUuv/XJYMD/OPio2xp9ak4= -----END RSA PRIVATE KEY----- From f046ade489aff1e6629c33d6504243585edf0b65 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 12:09:41 +0200 Subject: [PATCH 158/176] change go.mod as first step towards go 1.25 raise minium version of go compatiblity to 1.24.9 and toolchain to be used to 1.25.3 --- go.mod | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 5a27126..1cd0acc 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,8 @@ module github.com/gocsaf/csaf/v3 -go 1.23.0 +go 1.24.9 -toolchain go1.24.4 +toolchain go1.25.3 require ( github.com/BurntSushi/toml v1.5.0 From fc012fa820d60dc7b18651dc65f1c753614a64cb Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 12:42:36 +0200 Subject: [PATCH 159/176] upgrade .github/workflows and documentation * update all .github/workflows/ to use the latest version of actions and the go versions accordingly. (Only some github actions use a floating tag for the major version.) * reduce places where the go versions are hardcoded: * refEr to docs/Development.md from README.md * use `go.mod` from itest.yml. --- .github/workflows/generate-markdown.yml | 4 ++-- .github/workflows/go-oldstable.yml | 4 ++-- .github/workflows/go.yml | 14 +++++++------- .github/workflows/itest.yml | 5 +++-- .github/workflows/release.yml | 8 +++++--- README.md | 3 ++- docs/Development.md | 2 +- 7 files changed, 22 insertions(+), 18 deletions(-) diff --git a/.github/workflows/generate-markdown.yml b/.github/workflows/generate-markdown.yml index a59c944..7d9aca0 100644 --- a/.github/workflows/generate-markdown.yml +++ b/.github/workflows/generate-markdown.yml @@ -13,8 +13,8 @@ jobs: auto-update-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 - name: Markdown autodocs - uses: dineshsonachalam/markdown-autodocs@v1.0.4 + uses: dineshsonachalam/markdown-autodocs@v1.0.7 with: output_file_paths: '[./README.md, ./docs/*.md]' diff --git a/.github/workflows/go-oldstable.yml b/.github/workflows/go-oldstable.yml index fda6413..40eb8c2 100644 --- a/.github/workflows/go-oldstable.yml +++ b/.github/workflows/go-oldstable.yml @@ -12,10 +12,10 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: 'oldstable' diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b3f5389..d3d9522 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: "stable" @@ -27,12 +27,12 @@ jobs: run: go vet ./... - name: gofmt - uses: Jerome1337/gofmt-action@v1.0.4 + uses: Jerome1337/gofmt-action@v1.0.5 with: gofmt-flags: "-l -d" - name: Revive Action - uses: morphy2k/revive-action@v2.7.4 + uses: morphy2k/revive-action@v2 - name: Tests run: go test -v ./... 
@@ -46,17 +46,17 @@ jobs: pull-requests: write # Modver needs to write comments/status on PRs steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 # Modver needs full history for comparison - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: "stable" - name: Modver - uses: bobg/modver@v2.11.0 + uses: bobg/modver@v2.12.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index a99c269..6d32009 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -6,9 +6,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "^1.23.6" + go-version-file: "go.mod" + check-latest: true - name: Set up Node.js uses: actions/setup-node@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f77c9e3..52406e8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,17 +7,19 @@ on: jobs: releases-matrix: name: Release Go binaries + # use oldest available ubuntu to be compatible with more libc.so revs. runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: '^1.23.6' + go-version: '^1.24.9' + check-latest: true - name: Build run: make dist diff --git a/README.md b/README.md index 897dfe0..54543a7 100644 --- a/README.md +++ b/README.md @@ -78,7 +78,8 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) +- Needs a [supported version](docs/Development.md) of **Go** to be installed. + [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/docs/Development.md b/docs/Development.md index bc71c2c..f05d4d0 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.23 and 1.24). +the latest version of Go (currently 1.24 and 1.25). 
## Generated files From 223570ac9bde5648e59044a05edde0fb80981fac Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:00:33 +0200 Subject: [PATCH 160/176] fix itest.yml: checkout before refer to go.mod --- .github/workflows/itest.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 6d32009..878d1a3 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -5,6 +5,9 @@ jobs: build: runs-on: ubuntu-latest steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Set up Go uses: actions/setup-go@v6 with: @@ -12,13 +15,10 @@ jobs: check-latest: true - name: Set up Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v6 with: node-version: 24 - - name: Checkout - uses: actions/checkout@v4 - - name: Execute the scripts run: | sudo apt update From ef44c92f8b763e467f7b4cdc834c41f79155d438 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:17:28 +0200 Subject: [PATCH 161/176] improve code cleanness: use format string w error and thus makes newer go test versions happy --- cmd/csaf_checker/processor.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e427b44..584684c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -254,14 +254,12 @@ func (p *processor) run(domains []string) (*Report, error) { if !p.checkProviderMetadata(d) { // We need to fail the domain if the PMD cannot be parsed. p.badProviderMetadata.use() - message := fmt.Sprintf("Could not parse the Provider-Metadata.json of: %s", d) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Could not parse the Provider-Metadata.json of: %s", d) } if err := p.checkDomain(d); err != nil { p.badProviderMetadata.use() - message := fmt.Sprintf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Failed to find valid provider-metadata.json for domain %s: %v. 
", d, err) } domain := &Domain{Name: d} From ffb1a3194429be20311db025956c5629962c2647 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:22:37 +0200 Subject: [PATCH 162/176] update go dependencies --- go.mod | 16 ++++++++-------- go.sum | 18 ++++++++++++++++++ 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index 1cd0acc..64f6e97 100644 --- a/go.mod +++ b/go.mod @@ -10,14 +10,14 @@ require ( github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.9.0 github.com/PuerkitoBio/goquery v1.10.3 - github.com/gofrs/flock v0.12.1 + github.com/gofrs/flock v0.13.0 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 - go.etcd.io/bbolt v1.4.1 - golang.org/x/crypto v0.39.0 - golang.org/x/term v0.32.0 - golang.org/x/time v0.12.0 + go.etcd.io/bbolt v1.4.3 + golang.org/x/crypto v0.43.0 + golang.org/x/term v0.36.0 + golang.org/x/time v0.14.0 ) require ( @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/net v0.46.0 // indirect + golang.org/x/sys v0.37.0 // indirect + golang.org/x/text v0.30.0 // indirect ) diff --git a/go.sum b/go.sum index 1f5b5b4..60931c3 100644 --- a/go.sum +++ b/go.sum @@ -22,6 +22,8 @@ github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxK github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -40,9 +42,12 @@ github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= +go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= +go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= @@ -51,6 +56,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v golang.org/x/crypto v0.31.0/go.mod 
h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -67,6 +74,8 @@ golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -76,6 +85,7 @@ golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -89,6 +99,8 @@ golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -100,6 +112,8 @@ golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -112,8 +126,12 @@ golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= From 6cc1d7a38f67ee38e982acf8ba95d1432176dc9d Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:55:14 +0200 Subject: [PATCH 163/176] cleanup some dependencies with go mod tidy --- go.sum | 22 ++-------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/go.sum b/go.sum index 60931c3..ecd3d7e 100644 --- a/go.sum +++ b/go.sum @@ -20,8 +20,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= -github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -40,12 +38,9 @@ github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCw github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= -go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -54,8 +49,6 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ -72,8 +65,6 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -83,9 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -97,8 +87,6 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= @@ -110,8 +98,6 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= 
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -124,12 +110,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= -golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= From 8740244dd82ebe482ebb1698f4328ef41062f6ff Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 14:12:16 +0200 Subject: [PATCH 164/176] fix .github/workflows action versions --- .github/workflows/go-oldstable.yml | 4 ++-- .github/workflows/go.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/go-oldstable.yml b/.github/workflows/go-oldstable.yml index 40eb8c2..75fd280 100644 --- a/.github/workflows/go-oldstable.yml +++ b/.github/workflows/go-oldstable.yml @@ -12,10 +12,10 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: 'oldstable' diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index d3d9522..6d32f74 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: "stable" From b6281012f56003645e8ee32f7409d064fc874e11 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 14:15:32 +0200 Subject: [PATCH 165/176] fix go action versions --- .github/workflows/go.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 6d32f74..a9cdcf2 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -46,12 +46,12 @@ jobs: pull-requests: write # Modver needs to write comments/status on PRs steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@v5 with: fetch-depth: 0 # Modver needs full history for comparison - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: "stable" From 
cf9c62fcc0e089cea4c49428987c5b6dfe5996aa Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 16:09:18 +0200 Subject: [PATCH 166/176] silence revive linter warnings that we cannot or do not want to fix yet --- csaf/advisory.go | 39 ++++++++++++++++++++++----------------- internal/misc/mime.go | 2 +- util/client.go | 2 +- util/csv.go | 2 +- util/csv_test.go | 2 +- util/set.go | 2 +- util/url_test.go | 3 ++- 7 files changed, 29 insertions(+), 23 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index cc2516a..61c9a65 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -444,10 +444,11 @@ var csafFlagLabelPattern = alternativesUnmarshal( // machine readable flag. For example, this could be a machine readable justification // code why a product is not affected. type Flag struct { - Date *string `json:"date,omitempty"` - GroupIDs *ProductGroups `json:"group_ids,omitempty"` - Label *FlagLabel `json:"label"` // required - ProductIds *Products `json:"product_ids,omitempty"` + Date *string `json:"date,omitempty"` + GroupIDs *ProductGroups `json:"group_ids,omitempty"` + Label *FlagLabel `json:"label"` // required + //revive:disable-next-line:var-naming until new major version w fix + ProductIds *Products `json:"product_ids,omitempty"` } // Flags is a list if Flag elements. @@ -606,14 +607,16 @@ type RestartRequired struct { // Remediation specifies details on how to handle (and presumably, fix) a vulnerability. type Remediation struct { - Category *RemediationCategory `json:"category"` // required - Date *string `json:"date,omitempty"` - Details *string `json:"details"` // required - Entitlements []*string `json:"entitlements,omitempty"` - GroupIds *ProductGroups `json:"group_ids,omitempty"` - ProductIds *Products `json:"product_ids,omitempty"` - RestartRequired *RestartRequired `json:"restart_required,omitempty"` - URL *string `json:"url,omitempty"` + Category *RemediationCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details *string `json:"details"` // required + Entitlements []*string `json:"entitlements,omitempty"` + //revive:disable:var-naming until new major version w fix + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` + //revive:enable + RestartRequired *RestartRequired `json:"restart_required,omitempty"` + URL *string `json:"url,omitempty"` } // Remediations is a list of Remediation elements. @@ -739,11 +742,13 @@ var csafThreatCategoryPattern = alternativesUnmarshal( // Threat contains information about a vulnerability that can change with time. type Threat struct { - Category *ThreatCategory `json:"category"` // required - Date *string `json:"date,omitempty"` - Details *string `json:"details"` // required - GroupIds *ProductGroups `json:"group_ids,omitempty"` - ProductIds *Products `json:"product_ids,omitempty"` + Category *ThreatCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details *string `json:"details"` // required + //revive:disable:var-naming until new major version w fix + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` + //revive:enable } // Threats is a list of Threat elements. 
diff --git a/internal/misc/mime.go b/internal/misc/mime.go index acc1ba3..3b3662d 100644 --- a/internal/misc/mime.go +++ b/internal/misc/mime.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package misc +package misc //revive:disable-line:var-naming import ( "fmt" diff --git a/util/client.go b/util/client.go index b4478ca..957d777 100644 --- a/util/client.go +++ b/util/client.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming import ( "context" diff --git a/util/csv.go b/util/csv.go index d84644c..6f9c0f4 100644 --- a/util/csv.go +++ b/util/csv.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming import ( "bufio" diff --git a/util/csv_test.go b/util/csv_test.go index 575d83d..0dd24c7 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming import ( "bytes" diff --git a/util/set.go b/util/set.go index 61eb14b..f3d136b 100644 --- a/util/set.go +++ b/util/set.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package util +package util //revive:disable-line:var-naming // Set is a simple set type. type Set[K comparable] map[K]struct{} diff --git a/util/url_test.go b/util/url_test.go index dec73dc..fb2804a 100644 --- a/util/url_test.go +++ b/util/url_test.go @@ -6,7 +6,8 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming + import ( "net/url" From fb59a4060983a3a5d3800cb29c3855372fba9c11 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 16:19:13 +0200 Subject: [PATCH 167/176] fix code formatting --- internal/misc/mime.go | 2 +- util/client.go | 2 +- util/csv.go | 2 +- util/csv_test.go | 2 +- util/set.go | 2 +- util/url_test.go | 3 +-- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/internal/misc/mime.go b/internal/misc/mime.go index 3b3662d..5bb36d0 100644 --- a/internal/misc/mime.go +++ b/internal/misc/mime.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package misc //revive:disable-line:var-naming +package misc //revive:disable-line:var-naming import ( "fmt" diff --git a/util/client.go b/util/client.go index 957d777..b82bc54 100644 --- a/util/client.go +++ b/util/client.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util //revive:disable-line:var-naming +package util //revive:disable-line:var-naming import ( "context" diff --git a/util/csv.go b/util/csv.go index 6f9c0f4..cffaf52 100644 --- a/util/csv.go +++ b/util/csv.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util 
//revive:disable-line:var-naming +package util //revive:disable-line:var-naming import ( "bufio" diff --git a/util/csv_test.go b/util/csv_test.go index 0dd24c7..68b5a3e 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util //revive:disable-line:var-naming +package util //revive:disable-line:var-naming import ( "bytes" diff --git a/util/set.go b/util/set.go index f3d136b..1a625da 100644 --- a/util/set.go +++ b/util/set.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package util //revive:disable-line:var-naming +package util //revive:disable-line:var-naming // Set is a simple set type. type Set[K comparable] map[K]struct{} diff --git a/util/url_test.go b/util/url_test.go index fb2804a..bcf219e 100644 --- a/util/url_test.go +++ b/util/url_test.go @@ -6,8 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util //revive:disable-line:var-naming - +package util //revive:disable-line:var-naming import ( "net/url" From 46118544bed76824dc542f785d9e30c25fa24b6f Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 27 Oct 2025 10:35:38 +0100 Subject: [PATCH 168/176] upgrade dependencies, including go (#695) * change go.mod as first step towards go 1.25 raise minimum version of go compatibility to 1.24.9 and toolchain to be used to 1.25.3 * upgrade .github/workflows and documentation * update all .github/workflows/ to use the latest version of actions and the go versions accordingly. (Only some github actions use a floating tag for the major version.) * reduce places where the go versions are hardcoded: * refer to docs/Development.md from README.md * use `go.mod` from itest.yml.
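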
* fix itest.yml: checkout before referring to go.mod * improve code cleanliness: use a format string with the error and thus make newer go test versions happy * update go dependencies * cleanup some dependencies with go mod tidy * fix .github/workflows action versions * fix go action versions --- .github/workflows/generate-markdown.yml | 4 +-- .github/workflows/go-oldstable.yml | 4 +-- .github/workflows/go.yml | 14 ++++----- .github/workflows/itest.yml | 13 ++++---- .github/workflows/release.yml | 8 +++-- README.md | 3 +- cmd/csaf_checker/processor.go | 6 ++-- docs/Development.md | 2 +- go.mod | 20 ++++++------- go.sum | 40 ++++++++++++------------- 10 files changed, 58 insertions(+), 56 deletions(-) diff --git a/.github/workflows/generate-markdown.yml b/.github/workflows/generate-markdown.yml index a59c944..7d9aca0 100644 --- a/.github/workflows/generate-markdown.yml +++ b/.github/workflows/generate-markdown.yml @@ -13,8 +13,8 @@ jobs: auto-update-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 - name: Markdown autodocs - uses: dineshsonachalam/markdown-autodocs@v1.0.4 + uses: dineshsonachalam/markdown-autodocs@v1.0.7 with: output_file_paths: '[./README.md, ./docs/*.md]' diff --git a/.github/workflows/go-oldstable.yml b/.github/workflows/go-oldstable.yml index fda6413..75fd280 100644 --- a/.github/workflows/go-oldstable.yml +++ b/.github/workflows/go-oldstable.yml @@ -12,10 +12,10 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v6 with: go-version: 'oldstable' diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b3f5389..a9cdcf2 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v6 with: go-version: "stable" @@ -27,12 +27,12 @@ jobs: run: go vet ./... - name: gofmt - uses: Jerome1337/gofmt-action@v1.0.4 + uses: Jerome1337/gofmt-action@v1.0.5 with: gofmt-flags: "-l -d" - name: Revive Action - uses: morphy2k/revive-action@v2.7.4 + uses: morphy2k/revive-action@v2 - name: Tests run: go test -v ./...
@@ -46,17 +46,17 @@ jobs: pull-requests: write # Modver needs to write comments/status on PRs steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 # Modver needs full history for comparison - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v6 with: go-version: "stable" - name: Modver - uses: bobg/modver@v2.11.0 + uses: bobg/modver@v2.12.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index a99c269..878d1a3 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -5,19 +5,20 @@ jobs: build: runs-on: ubuntu-latest steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "^1.23.6" + go-version-file: "go.mod" + check-latest: true - name: Set up Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v6 with: node-version: 24 - - name: Checkout - uses: actions/checkout@v4 - - name: Execute the scripts run: | sudo apt update diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f77c9e3..52406e8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,17 +7,19 @@ on: jobs: releases-matrix: name: Release Go binaries + # use oldest available ubuntu to be compatible with more libc.so revs. runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: '^1.23.6' + go-version: '^1.24.9' + check-latest: true - name: Build run: make dist diff --git a/README.md b/README.md index 897dfe0..54543a7 100644 --- a/README.md +++ b/README.md @@ -78,7 +78,8 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) +- Needs a [supported version](docs/Development.md) of **Go** to be installed. + [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e427b44..584684c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -254,14 +254,12 @@ func (p *processor) run(domains []string) (*Report, error) { if !p.checkProviderMetadata(d) { // We need to fail the domain if the PMD cannot be parsed. p.badProviderMetadata.use() - message := fmt.Sprintf("Could not parse the Provider-Metadata.json of: %s", d) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Could not parse the Provider-Metadata.json of: %s", d) } if err := p.checkDomain(d); err != nil { p.badProviderMetadata.use() - message := fmt.Sprintf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) } domain := &Domain{Name: d} diff --git a/docs/Development.md b/docs/Development.md index bc71c2c..f05d4d0 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.23 and 1.24). 
+the latest version of Go (currently 1.24 and 1.25). ## Generated files diff --git a/go.mod b/go.mod index 5a27126..64f6e97 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,8 @@ module github.com/gocsaf/csaf/v3 -go 1.23.0 +go 1.24.9 -toolchain go1.24.4 +toolchain go1.25.3 require ( github.com/BurntSushi/toml v1.5.0 @@ -10,14 +10,14 @@ require ( github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.9.0 github.com/PuerkitoBio/goquery v1.10.3 - github.com/gofrs/flock v0.12.1 + github.com/gofrs/flock v0.13.0 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 - go.etcd.io/bbolt v1.4.1 - golang.org/x/crypto v0.39.0 - golang.org/x/term v0.32.0 - golang.org/x/time v0.12.0 + go.etcd.io/bbolt v1.4.3 + golang.org/x/crypto v0.43.0 + golang.org/x/term v0.36.0 + golang.org/x/time v0.14.0 ) require ( @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/net v0.46.0 // indirect + golang.org/x/sys v0.37.0 // indirect + golang.org/x/text v0.30.0 // indirect ) diff --git a/go.sum b/go.sum index 1f5b5b4..ecd3d7e 100644 --- a/go.sum +++ b/go.sum @@ -20,8 +20,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= -github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -38,19 +38,19 @@ github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCw github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= -go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= +go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= +go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -65,8 +65,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -74,8 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -87,8 +87,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod 
h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -98,8 +98,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -110,10 +110,10 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= -golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= From 9607f8db94c5078f29fd46575c3fc0cdfb527c40 Mon Sep 17 00:00:00 2001 From: Christoph Klassen <100708552+cintek@users.noreply.github.com> Date: Mon, 27 Oct 2025 10:38:22 +0100 Subject: [PATCH 169/176] fix: Documentation about supported options (#697) --- docs/csaf_downloader.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index e95bc62..123694e 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -75,7 +75,7 @@ insecure = false # client_cert # not set by default # client_key # not set by default # client_passphrase # not set by default -ignoresigcheck = false +ignore_sigcheck = false # rate # set to unlimited worker = 2 # time_range # not set by default From 5a1c2a08735444720d999dbdcde5a9f529c6c3d9 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 19 Nov 2025 12:12:43 +0100 Subject: [PATCH 170/176] Add category field to ROLIE feed model. 
--- csaf/rolie.go | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index d3a5ac7..9351386 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -9,8 +9,10 @@ package csaf import ( + "bytes" "encoding/json" "io" + "os" "sort" "time" @@ -169,14 +171,15 @@ type Format struct { // Entry for ROLIE. type Entry struct { - ID string `json:"id"` - Titel string `json:"title"` - Link []Link `json:"link"` - Published TimeStamp `json:"published"` - Updated TimeStamp `json:"updated"` - Summary *Summary `json:"summary,omitempty"` - Content Content `json:"content"` - Format Format `json:"format"` + ID string `json:"id"` + Titel string `json:"title"` + Link []Link `json:"link"` + Published TimeStamp `json:"published"` + Updated TimeStamp `json:"updated"` + Summary *Summary `json:"summary,omitempty"` + Content Content `json:"content"` + Format Format `json:"format"` + Category []ROLIECategory `json:"category,omitempty"` } // FeedData is the content of the ROLIE feed. @@ -196,6 +199,14 @@ type ROLIEFeed struct { // LoadROLIEFeed loads a ROLIE feed from a reader. func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) { + all, err := io.ReadAll(r) + if err != nil { + return nil, err + } + if err := os.WriteFile("rolie.json", all, 060); err != nil { + return nil, err + } + r = bytes.NewReader(all) var rf ROLIEFeed if err := misc.StrictJSONParse(r, &rf); err != nil { return nil, err From d6bac95e454665b8d5c040b92f72cbb0f1656a74 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 19 Nov 2025 12:56:04 +0100 Subject: [PATCH 171/176] Removed debugging code --- csaf/rolie.go | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index 9351386..d023028 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -9,10 +9,8 @@ package csaf import ( - "bytes" "encoding/json" "io" - "os" "sort" "time" @@ -199,14 +197,6 @@ type ROLIEFeed struct { // LoadROLIEFeed loads a ROLIE feed from a reader. func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) { - all, err := io.ReadAll(r) - if err != nil { - return nil, err - } - if err := os.WriteFile("rolie.json", all, 060); err != nil { - return nil, err - } - r = bytes.NewReader(all) var rf ROLIEFeed if err := misc.StrictJSONParse(r, &rf); err != nil { return nil, err From 9a37a8ecfa695dbd973cb9e3dacc2049f14c109a Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 27 Nov 2025 15:23:34 +0100 Subject: [PATCH 172/176] Add more fields to rolie entry. --- csaf/rolie.go | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index d023028..2b7d6fd 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -169,15 +169,22 @@ type Format struct { // Entry for ROLIE. 
type Entry struct { - ID string `json:"id"` - Titel string `json:"title"` - Link []Link `json:"link"` - Published TimeStamp `json:"published"` - Updated TimeStamp `json:"updated"` - Summary *Summary `json:"summary,omitempty"` - Content Content `json:"content"` - Format Format `json:"format"` - Category []ROLIECategory `json:"category,omitempty"` + Base *string `json:"base,omitempty"` + LanguageTag *string `json:"lang,omitempty"` + Author *json.RawMessage `json:"author,omitempty"` + Category []ROLIECategory `json:"category,omitempty"` + Content Content `json:"content"` + Contributor *json.RawMessage `json:"contibutor,omitempty"` + ID string `json:"id"` + Link []Link `json:"link"` + Published TimeStamp `json:"published"` + Rights *json.RawMessage `json:"rights,omitempty"` + Source *json.RawMessage `json:"source,omitempty"` + Summary *Summary `json:"summary,omitempty"` + Titel string `json:"title"` + Updated TimeStamp `json:"updated"` + Format Format `json:"format"` + Property *json.RawMessage `json:"property,omitempty"` } // FeedData is the content of the ROLIE feed. From c678a97d4307b8b1defb78f79b6115c119545cf5 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 28 Nov 2025 11:03:29 +0100 Subject: [PATCH 173/176] Update 3rd party libraries --- go.mod | 12 ++++++------ go.sum | 28 ++++++++++++++-------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/go.mod b/go.mod index 64f6e97..c8ed550 100644 --- a/go.mod +++ b/go.mod @@ -9,14 +9,14 @@ require ( github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.9.0 - github.com/PuerkitoBio/goquery v1.10.3 + github.com/PuerkitoBio/goquery v1.11.0 github.com/gofrs/flock v0.13.0 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 go.etcd.io/bbolt v1.4.3 - golang.org/x/crypto v0.43.0 - golang.org/x/term v0.36.0 + golang.org/x/crypto v0.45.0 + golang.org/x/term v0.37.0 golang.org/x/time v0.14.0 ) @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.46.0 // indirect - golang.org/x/sys v0.37.0 // indirect - golang.org/x/text v0.30.0 // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/text v0.31.0 // indirect ) diff --git a/go.sum b/go.sum index ecd3d7e..5328cba 100644 --- a/go.sum +++ b/go.sum @@ -10,8 +10,8 @@ github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ek github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= github.com/ProtonMail/gopenpgp/v2 v2.9.0 h1:ruLzBmwe4dR1hdnrsEJ/S7psSBmV15gFttFUPP/+/kE= github.com/ProtonMail/gopenpgp/v2 v2.9.0/go.mod h1:IldDyh9Hv1ZCCYatTuuEt1XZJ0OPjxLpTarDfglih7s= -github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo= -github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y= +github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw= +github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ= github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= @@ -49,8 
+49,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -65,8 +65,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -74,8 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -87,8 +87,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry 
v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -98,8 +98,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -110,8 +110,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= From 502376ce3a4104d62b7614557f53a1c38ad62f3c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 28 Nov 2025 16:12:10 +0100 Subject: [PATCH 174/176] fix typo: contibutor -> contributor --- csaf/rolie.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index 2b7d6fd..84b916c 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -174,7 +174,7 @@ type Entry struct { Author *json.RawMessage `json:"author,omitempty"` Category []ROLIECategory `json:"category,omitempty"` Content Content `json:"content"` - Contributor *json.RawMessage `json:"contibutor,omitempty"` + Contributor *json.RawMessage `json:"contributor,omitempty"` ID string `json:"id"` Link []Link `json:"link"` Published TimeStamp `json:"published"` From 52ce6bcde6f4a2c22eefc021b27f99866bff9d58 Mon Sep 17 00:00:00 2001 From: Benjamin Grandfond Date: Thu, 18 Dec 2025 12:50:37 +0100 Subject: [PATCH 175/176] fix: engine is invalid when name is missing (#710) --- csaf/advisory.go | 4 +- csaf/advisory_test.go | 11 +- ...dvisory-tracking-generator-no-version.json | 169 ++++++++++++++++++ 3 files changed, 177 insertions(+), 7 deletions(-) create mode 100644 testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json diff --git a/csaf/advisory.go b/csaf/advisory.go index 61c9a65..159b980 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -891,8 +891,8 @@ func (rs Revisions) Validate() error { // 
Validate validates an Engine. func (e *Engine) Validate() error { - if e.Version == nil { - return errors.New("'version' is missing") + if e.Name == nil { + return errors.New("'name' is missing") } return nil } diff --git a/csaf/advisory_test.go b/csaf/advisory_test.go index 9a82884..c53834b 100644 --- a/csaf/advisory_test.go +++ b/csaf/advisory_test.go @@ -14,11 +14,12 @@ func TestLoadAdvisory(t *testing.T) { name string args args wantErr bool - }{{ - name: "Valid documents", - args: args{jsonDir: "csaf-documents/valid"}, - wantErr: false, - }, + }{ + { + name: "Valid documents", + args: args{jsonDir: "csaf-documents/valid"}, + wantErr: false, + }, { name: "Garbage trailing data", args: args{jsonDir: "csaf-documents/trailing-garbage-data"}, diff --git a/testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json b/testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json new file mode 100644 index 0000000..47c9907 --- /dev/null +++ b/testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json @@ -0,0 +1,169 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + 
"date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} From 586524a97e42c3fa5b97fbcb4e1169ad1df064da Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 18 Dec 2025 13:25:44 +0100 Subject: [PATCH 176/176] Update 3rd party libraries. (#711) --- go.mod | 10 +++++----- go.sum | 24 ++++++++++++------------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/go.mod b/go.mod index c8ed550..8f2cd62 100644 --- a/go.mod +++ b/go.mod @@ -15,8 +15,8 @@ require ( github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 go.etcd.io/bbolt v1.4.3 - golang.org/x/crypto v0.45.0 - golang.org/x/term v0.37.0 + golang.org/x/crypto v0.46.0 + golang.org/x/term v0.38.0 golang.org/x/time v0.14.0 ) @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.47.0 // indirect - golang.org/x/sys v0.38.0 // indirect - golang.org/x/text v0.31.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/text v0.32.0 // indirect ) diff --git a/go.sum b/go.sum index 5328cba..eeaa200 100644 --- a/go.sum +++ b/go.sum @@ -49,8 +49,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= -golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= +golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -65,8 +65,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= -golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ 
-74,8 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= -golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -87,8 +87,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= -golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= +golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -98,8 +98,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= -golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= +golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -110,8 +110,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= -golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= +golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= golang.org/x/time v0.14.0 
h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=