From b457dc872fa4f8031731509ba2ee31e49cba6a3c Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 1 Dec 2023 11:45:09 +0100 Subject: [PATCH 001/117] Remove usage of slices in enum generator. (#516) --- csaf/generate_cvss_enums.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index 911b64d..eaa2cb9 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -17,7 +17,7 @@ import ( "go/format" "log" "os" - "slices" + "sort" "strings" "text/template" ) @@ -135,7 +135,7 @@ func main() { defs = append(defs, k) } } - slices.Sort(defs) + sort.Strings(defs) var source bytes.Buffer From 9073a8a282a4efd149acd88aeb23b3d1004cf1c7 Mon Sep 17 00:00:00 2001 From: Juan Ariza Toledano Date: Fri, 1 Dec 2023 15:31:25 +0100 Subject: [PATCH 002/117] feat: Add function to find product identification helpers inspecting the tree (#505) * feat: Add function to find product identification helpers inspecting the tree Signed-off-by: juan131 * fix: simplify unit tests Signed-off-by: juan131 * fix: also iterate over relationships Signed-off-by: juan131 * fix: adapt example to use new library function Signed-off-by: juan131 * Separate collecting and visiting of the product id helpers. --------- Signed-off-by: juan131 Co-authored-by: Sascha L. Teichmann --- csaf/util.go | 61 +++++++++++ csaf/util_test.go | 182 ++++++++++++++++++++++++++++++++ examples/purls_searcher/main.go | 111 +++---------------- 3 files changed, 258 insertions(+), 96 deletions(-) create mode 100644 csaf/util_test.go diff --git a/csaf/util.go b/csaf/util.go index f192f09..f8e34be 100644 --- a/csaf/util.go +++ b/csaf/util.go @@ -36,3 +36,64 @@ func ExtractProviderURL(r io.Reader, all bool) ([]string, error) { } return urls, nil } + +// CollectProductIdentificationHelpers returns a slice of all ProductIdentificationHelper +// for a given ProductID. 
+func (pt *ProductTree) CollectProductIdentificationHelpers(id ProductID) []*ProductIdentificationHelper { + var helpers []*ProductIdentificationHelper + pt.FindProductIdentificationHelpers( + id, func(helper *ProductIdentificationHelper) { + helpers = append(helpers, helper) + }) + return helpers +} + +// FindProductIdentificationHelpers calls visit on all ProductIdentificationHelper +// for a given ProductID by iterating over all full product names and branches +// recursively available in the ProductTree. +func (pt *ProductTree) FindProductIdentificationHelpers( + id ProductID, + visit func(*ProductIdentificationHelper), +) { + // Iterate over all full product names + if fpns := pt.FullProductNames; fpns != nil { + for _, fpn := range *fpns { + if fpn != nil && + fpn.ProductID != nil && *fpn.ProductID == id && + fpn.ProductIdentificationHelper != nil { + visit(fpn.ProductIdentificationHelper) + } + } + } + + // Iterate over branches recursively + var recBranch func(b *Branch) + recBranch = func(b *Branch) { + if b == nil { + return + } + if fpn := b.Product; fpn != nil && + fpn.ProductID != nil && *fpn.ProductID == id && + fpn.ProductIdentificationHelper != nil { + visit(fpn.ProductIdentificationHelper) + } + for _, c := range b.Branches { + recBranch(c) + } + } + for _, b := range pt.Branches { + recBranch(b) + } + + // Iterate over relationships + if rels := pt.RelationShips; rels != nil { + for _, rel := range *rels { + if rel != nil { + if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil && + *fpn.ProductID == id && fpn.ProductIdentificationHelper != nil { + visit(fpn.ProductIdentificationHelper) + } + } + } + } +} diff --git a/csaf/util_test.go b/csaf/util_test.go new file mode 100644 index 0000000..0d5ff49 --- /dev/null +++ b/csaf/util_test.go @@ -0,0 +1,182 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package csaf + +import ( + "reflect" + "testing" +) + +func TestProductTree_FindProductIdentificationHelpers(t *testing.T) { + type fields struct { + Branches Branches + FullProductNames *FullProductNames + RelationShips *Relationships + } + type args struct { + id ProductID + } + tests := []struct { + name string + fields fields + args args + want []*ProductIdentificationHelper + }{ + { + name: "empty product tree", + args: args{ + id: "CSAFPID-0001", + }, + want: nil, + }, + { + name: "product tree with matching full product names", + fields: fields{ + FullProductNames: &FullProductNames{{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }}, + }, + args: args{ + id: "CSAFPID-0001", + }, + want: []*ProductIdentificationHelper{{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }}, + }, + { + name: "product tree with no matching full product names", + fields: fields{ + FullProductNames: &FullProductNames{{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }}, + }, + args: args{ + id: "CSAFPID-0002", + }, + want: nil, + }, + { + name: "product tree with matching branches", + fields: fields{ + Branches: Branches{{ + Name: &[]string{"beta"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + Branches: Branches{{ + Name: &[]string{"beta-2"}[0], + Product: &FullProductName{ + ProductID: 
&[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0], + }, + }, + }}, + }}, + }, + args: args{ + id: "CSAFPID-0001", + }, + want: []*ProductIdentificationHelper{{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, { + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0], + }}, + }, + { + name: "product tree with no matching branches", + fields: fields{ + Branches: Branches{{ + Name: &[]string{"beta"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + Branches: Branches{{ + Name: &[]string{"beta-2"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0], + }, + }, + }}, + }}, + }, + args: args{ + id: "CSAFPID-0002", + }, + want: nil, + }, + { + name: "product tree with matching relationships", + fields: fields{ + RelationShips: &Relationships{{ + FullProductName: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + }}, + }, + args: args{ + id: "CSAFPID-0001", + }, + want: []*ProductIdentificationHelper{{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }}, + }, + { + name: "product tree with no matching relationships", + fields: fields{ + RelationShips: &Relationships{{ + FullProductName: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: 
&[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + }}, + }, + args: args{ + id: "CSAFPID-0002", + }, + want: nil, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + pt := &ProductTree{ + Branches: test.fields.Branches, + FullProductNames: test.fields.FullProductNames, + RelationShips: test.fields.RelationShips, + } + if got := pt.CollectProductIdentificationHelpers(test.args.id); !reflect.DeepEqual(got, test.want) { + tt.Errorf("ProductTree.FindProductIdentificationHelpers() = %v, want %v", + got, test.want) + } + }) + } +} diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index a91470b..c1ec3e1 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -9,9 +9,8 @@ import ( "os" "strings" - "golang.org/x/exp/slices" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/csaf-poc/csaf_distribution/v3/util" ) func main() { @@ -35,106 +34,26 @@ func main() { // run prints PURLs belonging to the given Product IDs. func run(files []string, ids string) error { - - uf := newURLFinder(strings.Split(ids, ",")) - for _, file := range files { adv, err := csaf.LoadAdvisory(file) if err != nil { return fmt.Errorf("loading %q failed: %w", file, err) } - uf.findURLs(adv) - uf.dumpURLs() - uf.clear() + + for _, id := range strings.Split(ids, ",") { + already := util.Set[csaf.PURL]{} + i := 0 + adv.ProductTree.FindProductIdentificationHelpers( + csaf.ProductID(id), + func(h *csaf.ProductIdentificationHelper) { + if h.PURL != nil && !already.Contains(*h.PURL) { + already.Add(*h.PURL) + i++ + fmt.Printf("%d. %s\n", i, *h.PURL) + } + }) + } } return nil } - -// urlFinder helps to find the URLs of a set of product ids in advisories. -type urlFinder struct { - ids []csaf.ProductID - urls [][]csaf.PURL -} - -// newURLFinder creates a new urlFinder for given ids. 
-func newURLFinder(ids []string) *urlFinder { - uf := &urlFinder{ - ids: make([]csaf.ProductID, len(ids)), - urls: make([][]csaf.PURL, len(ids)), - } - for i := range uf.ids { - uf.ids[i] = csaf.ProductID(ids[i]) - } - return uf -} - -// clear resets the url finder after a run on an advisory. -func (uf *urlFinder) clear() { - for i := range uf.urls { - uf.urls[i] = uf.urls[i][:0] - } -} - -// dumpURLs dumps the found URLs to stdout. -func (uf *urlFinder) dumpURLs() { - for i, urls := range uf.urls { - if len(urls) == 0 { - continue - } - fmt.Printf("Found URLs for %s:\n", uf.ids[i]) - for j, url := range urls { - fmt.Printf("%d. %s\n", j+1, url) - } - } -} - -// findURLs find the URLs in an advisory. -func (uf *urlFinder) findURLs(adv *csaf.Advisory) { - tree := adv.ProductTree - if tree == nil { - return - } - - // If we have found it and we have a valid URL add unique. - add := func(idx int, h *csaf.ProductIdentificationHelper) { - if idx != -1 && h != nil && h.PURL != nil && - !slices.Contains(uf.urls[idx], *h.PURL) { - uf.urls[idx] = append(uf.urls[idx], *h.PURL) - } - } - - // First iterate over full product names. - if names := tree.FullProductNames; names != nil { - for _, name := range *names { - if name != nil && name.ProductID != nil { - add(slices.Index(uf.ids, *name.ProductID), name.ProductIdentificationHelper) - } - } - } - - // Second traverse the branches recursively. - var recBranch func(*csaf.Branch) - recBranch = func(b *csaf.Branch) { - if p := b.Product; p != nil && p.ProductID != nil { - add(slices.Index(uf.ids, *p.ProductID), p.ProductIdentificationHelper) - } - for _, c := range b.Branches { - recBranch(c) - } - } - for _, b := range tree.Branches { - recBranch(b) - } - - // Third iterate over relationships. 
- if tree.RelationShips != nil { - for _, rel := range *tree.RelationShips { - if rel != nil { - if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil { - add(slices.Index(uf.ids, *fpn.ProductID), fpn.ProductIdentificationHelper) - } - } - } - } -} From 03e418182d76d36a309912fd6694136f123d9007 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 4 Dec 2023 11:31:14 +0100 Subject: [PATCH 003/117] Advisories: Time filter download by 'updated' field in ROLIE entries. (#519) * Use 'updated' field of ROLIE field entries to time filter downloads. * More suited variable naming --- cmd/csaf_checker/processor.go | 8 +------- csaf/advisories.go | 2 +- internal/models/models.go | 8 -------- internal/models/models_test.go | 24 ------------------------ 4 files changed, 2 insertions(+), 40 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 2a5161c..7eaefef 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -33,7 +33,6 @@ import ( "golang.org/x/time/rate" "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" "github.com/csaf-poc/csaf_distribution/v3/util" ) @@ -548,7 +547,7 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { // Filter if we have date checking. if accept := p.cfg.Range; accept != nil { - if pub := time.Time(entry.Published); !pub.IsZero() && !accept.Contains(pub) { + if t := time.Time(entry.Updated); !t.IsZero() && !accept.Contains(t) { return } } @@ -667,11 +666,6 @@ func (p *processor) integrity( var folderYear *int if m := yearFromURL.FindStringSubmatch(u); m != nil { year, _ := strconv.Atoi(m[1]) - // Check if the year is in the accepted time interval. 
- if accept := p.cfg.Range; accept != nil && - !accept.Intersects(models.Year(year)) { - continue - } folderYear = &year } diff --git a/csaf/advisories.go b/csaf/advisories.go index 9c22ed3..5b85690 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -316,7 +316,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( // Filter if we have date checking. if afp.AgeAccept != nil { - if pub := time.Time(entry.Published); !pub.IsZero() && !afp.AgeAccept(pub) { + if t := time.Time(entry.Updated); !t.IsZero() && !afp.AgeAccept(t) { return } } diff --git a/internal/models/models.go b/internal/models/models.go index 00fead3..520cd9c 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -31,14 +31,6 @@ func NewTimeInterval(a, b time.Time) TimeRange { return TimeRange{a, b} } -// Year returns the time range for a given year. -func Year(year int) TimeRange { - return TimeRange{ - time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC), - time.Date(year, time.December, 31, 23, 59, 59, int(time.Second-time.Nanosecond), time.UTC), - } -} - // guessDate tries to guess an RFC 3339 date time from a given string. func guessDate(s string) (time.Time, bool) { for _, layout := range []string{ diff --git a/internal/models/models_test.go b/internal/models/models_test.go index 0217bf7..a40100f 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -173,27 +173,3 @@ func TestTimeRangeIntersects(t *testing.T) { } } } - -// TestTimeRangeYear checks if the Year construction works. 
-func TestTimeRangeYear(t *testing.T) { - var ( - year = Year(1984) - first = time.Date(1984, time.January, 1, 0, 0, 0, 0, time.UTC) - before = first.Add(-time.Nanosecond) - after = time.Date(1984+1, time.January, 1, 0, 0, 0, 0, time.UTC) - last = after.Add(-time.Nanosecond) - ) - for _, x := range []struct { - t time.Time - expected bool - }{ - {t: first, expected: true}, - {t: before, expected: false}, - {t: last, expected: true}, - {t: after, expected: false}, - } { - if got := year.Contains(x.t); got != x.expected { - t.Fatalf("%v: got %t expected %t", x.t, got, x.expected) - } - } -} From 6c8b3757aacef4e45d6fccf818a4218add03eed6 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Thu, 7 Dec 2023 16:12:26 +0100 Subject: [PATCH 004/117] Older version (#513) * Add go_legacy.yml to check for compatibility with older go versions * Remove tests already done in go.yml * fix: Update actions, use stable/oldstable in actions --------- Co-authored-by: JanHoefelmeyer --- .github/workflows/go.yml | 6 +++--- .github/workflows/go_legacy.yml | 26 ++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/go_legacy.yml diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 1f277f9..95ee8c7 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -12,12 +12,12 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v4 with: - go-version: 1.21.0 + go-version: 'stable' - name: Build run: go build -v ./cmd/... 
diff --git a/.github/workflows/go_legacy.yml b/.github/workflows/go_legacy.yml new file mode 100644 index 0000000..a86368d --- /dev/null +++ b/.github/workflows/go_legacy.yml @@ -0,0 +1,26 @@ +name: Go + +on: + push: + paths: + - "**.go" + pull_request: + paths: + - "**.go" + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: 'oldstable' + + - name: Build + run: go build -v ./cmd/... + + - name: Tests + run: go test -v ./... From 9a1c66eb8ead1a7075c3ee00bbef3bb97a469883 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 15 Jan 2024 08:59:58 +0100 Subject: [PATCH 005/117] checker: Ensure that the processor is reset before checking each domain. (#523) --- cmd/csaf_checker/processor.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7eaefef..8f3a6c1 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -202,8 +202,8 @@ func (p *processor) close() { } } -// clean clears the fields values of the given processor. -func (p *processor) clean() { +// reset clears the fields values of the given processor. +func (p *processor) reset() { p.redirects = nil p.noneTLS = nil for k := range p.alreadyChecked { @@ -247,6 +247,8 @@ func (p *processor) run(domains []string) (*Report, error) { } for _, d := range domains { + p.reset() + if !p.checkProviderMetadata(d) { // We cannot build a report if the provider metadata cannot be parsed. log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) @@ -287,7 +289,6 @@ func (p *processor) run(domains []string) (*Report, error) { domain.Passed = rules.eval(p) report.Domains = append(report.Domains, domain) - p.clean() } return &report, nil From b858640fc173be3b4373694b036c83bd5fcc26a8 Mon Sep 17 00:00:00 2001 From: "Bernhard E. 
Reiter" Date: Fri, 23 Feb 2024 14:48:39 +0100 Subject: [PATCH 006/117] docs: fix minor typo in test-keys/Readme.md (#525) --- docs/test-keys/Readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/test-keys/Readme.md b/docs/test-keys/Readme.md index 5b422fd..94c8d8f 100644 --- a/docs/test-keys/Readme.md +++ b/docs/test-keys/Readme.md @@ -1,6 +1,6 @@ OpenPGP key-pairs for testing only. -Note: as the keypairs wre fully public, **do not use them for production**. +Note: as the keypairs are fully public, **do not use them for production**. Create your own keypair(s) with the security properties and operational security you need. From 51a681ef3101506ec402e826064bc28f00a94250 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 27 Feb 2024 09:44:41 +0100 Subject: [PATCH 007/117] docs: improve link to CSAF standard documents * Add overview link to csaf.io * Fix link to specification and add link to the latest errata document. --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 54daf87..69601cd 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # csaf_distribution -An implementation of a -[CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html) +An implementation of a [CSAF](https://csaf.io/) +[2.0 Spec](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) +([Errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. From e658738b568ba6c6173325ce4b1081c8142b081c Mon Sep 17 00:00:00 2001 From: Christian Banse Date: Thu, 18 Apr 2024 19:51:25 +0200 Subject: [PATCH 008/117] Added support for structured logging in `csaf_aggretator` This PR adds structured logging for the aggregator service. Currently, only the text handler is used, but I can extend this to use the JSON handler as well. 
In this case, probably some code that is shared between the aggregator and the downloader would need to be moved to a common package. I was also wondering, whether this repo is moving to Go 1.21 at the future, since `slog` was introduced in to the standard lib in 1.21. So currently, this still relies on the `x/exp` package. Fixes #462 --- cmd/csaf_aggregator/config.go | 21 ++++++++++--- cmd/csaf_aggregator/full.go | 38 ++++++++++++++++------- cmd/csaf_aggregator/indices.go | 3 +- cmd/csaf_aggregator/interim.go | 10 +++--- cmd/csaf_aggregator/lazytransaction.go | 5 +-- cmd/csaf_aggregator/main.go | 11 ++++--- cmd/csaf_aggregator/mirror.go | 43 ++++++++++++-------------- cmd/csaf_aggregator/processor.go | 32 +++++++++++-------- csaf/advisories.go | 12 +++++++ go.mod | 2 +- go.sum | 2 ++ internal/options/options.go | 10 ++++++ 12 files changed, 122 insertions(+), 67 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index edb1fd9..2a2bef2 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -12,7 +12,6 @@ import ( "crypto/tls" "errors" "fmt" - "log" "net/http" "os" "runtime" @@ -26,6 +25,7 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/internal/models" "github.com/csaf-poc/csaf_distribution/v3/internal/options" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" "golang.org/x/time/rate" ) @@ -178,9 +178,11 @@ func (p *provider) ageAccept(c *config) func(time.Time) bool { } if c.Verbose { - log.Printf( - "Setting up filter to accept advisories within time range %s to %s\n", - r[0].Format(time.RFC3339), r[1].Format(time.RFC3339)) + slog.Debug( + "Setting up filter to accept advisories within time range", + "from", r[0].Format(time.RFC3339), + "to", r[1].Format(time.RFC3339), + ) } return r.Contains } @@ -393,6 +395,17 @@ func (c *config) setDefaults() { } } +// prepareLogging sets up the structured logging. 
+func (cfg *config) prepareLogging() error { + ho := slog.HandlerOptions{ + Level: slog.LevelDebug, + } + handler := slog.NewTextHandler(os.Stdout, &ho) + logger := slog.New(handler) + slog.SetDefault(logger) + return nil +} + // compileIgnorePatterns compiles the configured patterns to be ignored. func (p *provider) compileIgnorePatterns() error { pm, err := filter.NewPatternMatcher(p.IgnorePattern) diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 600c650..2165397 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -11,7 +11,6 @@ package main import ( "errors" "fmt" - "log" "os" "path/filepath" "strings" @@ -20,6 +19,7 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) type fullJob struct { @@ -29,11 +29,13 @@ type fullJob struct { err error } -// setupProviderFull fetches the provider-metadate.json for a specific provider. +// setupProviderFull fetches the provider-metadata.json for a specific provider. 
func (w *worker) setupProviderFull(provider *provider) error { - log.Printf("worker #%d: %s (%s)\n", - w.num, provider.Name, provider.Domain) - + w.log.Info("Setting up provider", + "provider", slog.GroupValue( + slog.String("name", provider.Name), + slog.String("domain", provider.Domain), + )) w.dir = "" w.provider = provider @@ -55,7 +57,7 @@ func (w *worker) setupProviderFull(provider *provider) error { "provider-metadata.json has %d validation issues", len(errors)) } - log.Printf("provider-metadata: %s\n", w.loc) + w.log.Info("Using provider-metadata", "url", w.loc) return nil } @@ -79,7 +81,7 @@ func (w *worker) fullWork(wg *sync.WaitGroup, jobs <-chan *fullJob) { func (p *processor) full() error { if p.cfg.runAsMirror() { - log.Println("Running in aggregator mode") + p.log.Info("Running in aggregator mode") // check if we need to setup a remote validator if p.cfg.RemoteValidatorOptions != nil { @@ -96,16 +98,18 @@ func (p *processor) full() error { }() } } else { - log.Println("Running in lister mode") + p.log.Info("Running in lister mode") } queue := make(chan *fullJob) var wg sync.WaitGroup - log.Printf("Starting %d workers.\n", p.cfg.Workers) + p.log.Info("Starting workers...", "num", p.cfg.Workers) + for i := 1; i <= p.cfg.Workers; i++ { wg.Add(1) w := newWorker(i, p) + go w.fullWork(&wg, queue) } @@ -135,12 +139,22 @@ func (p *processor) full() error { for i := range jobs { j := &jobs[i] if j.err != nil { - log.Printf("error: '%s' failed: %v\n", j.provider.Name, j.err) + p.log.Error("Job execution failed", + slog.Group("job", + slog.Group("provider"), + "name", j.provider.Name, + ), + "err", j.err, + ) continue } if j.aggregatorProvider == nil { - log.Printf( - "error: '%s' does not produce any result.\n", j.provider.Name) + p.log.Error("Job did not produce any result", + slog.Group("job", + slog.Group("provider"), + "name", j.provider.Name, + ), + ) continue } diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 
69954bd..cc91b45 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -12,7 +12,6 @@ import ( "bufio" "encoding/csv" "fmt" - "log" "os" "path/filepath" "sort" @@ -377,7 +376,7 @@ func (w *worker) writeIndices() error { } for label, summaries := range w.summaries { - log.Printf("%s: %d\n", label, len(summaries)) + w.log.Debug("Writing indices", "label", label, "summaries.num", len(summaries)) if err := w.writeInterims(label, summaries); err != nil { return err } diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index bdd5ebc..cf4a937 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -17,7 +17,6 @@ import ( "errors" "fmt" "io" - "log" "net/http" "os" "path/filepath" @@ -102,12 +101,12 @@ func (w *worker) checkInterims( // XXX: Should we return an error here? for _, e := range errors { - log.Printf("validation error: %s: %v\n", url, e) + w.log.Error("validation error", "url", url, "err", e) } // We need to write the changed content. - // This will start the transcation if not already started. + // This will start the transaction if not already started. dst, err := tx.Dst() if err != nil { return nil, err @@ -159,8 +158,7 @@ func (w *worker) checkInterims( // setupProviderInterim prepares the worker for a specific provider. 
func (w *worker) setupProviderInterim(provider *provider) { - log.Printf("worker #%d: %s (%s)\n", - w.num, provider.Name, provider.Domain) + w.log.Info("Setting up worker", provider.Name, provider.Domain) w.dir = "" w.provider = provider @@ -262,7 +260,7 @@ func (p *processor) interim() error { queue := make(chan *interimJob) var wg sync.WaitGroup - log.Printf("Starting %d workers.\n", p.cfg.Workers) + p.log.Info("Starting workers...", "num", p.cfg.Workers) for i := 1; i <= p.cfg.Workers; i++ { wg.Add(1) w := newWorker(i, p) diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index a2b1e94..458002f 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -9,11 +9,11 @@ package main import ( - "log" "os" "path/filepath" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) type lazyTransaction struct { @@ -85,7 +85,8 @@ func (lt *lazyTransaction) commit() error { os.RemoveAll(lt.dst) return err } - log.Printf("Move %q -> %q\n", symlink, lt.src) + + slog.Debug("Moving directory", "from", symlink, "to", lt.src) if err := os.Rename(symlink, lt.src); err != nil { os.RemoveAll(lt.dst) return err diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 74a9670..b738a7e 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -15,7 +15,9 @@ import ( "path/filepath" "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gofrs/flock" + "golang.org/x/exp/slog" ) func lock(lockFile *string, fn func() error) error { @@ -44,8 +46,9 @@ func lock(lockFile *string, fn func() error) error { func main() { _, cfg, err := parseArgsConfig() - options.ErrorCheck(err) - options.ErrorCheck(cfg.prepare()) - p := processor{cfg: cfg} - options.ErrorCheck(lock(cfg.LockFile, p.process)) + cfg.prepareLogging() + options.ErrorCheckStructured(err) + options.ErrorCheckStructured(cfg.prepare()) + p := processor{cfg: cfg, log: 
slog.Default()} + options.ErrorCheckStructured(lock(cfg.LockFile, p.process)) } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 3acb48e..0779a5b 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -16,7 +16,7 @@ import ( "encoding/json" "fmt" "io" - "log" + "log/slog" "net/http" "net/url" "os" @@ -47,7 +47,7 @@ func (w *worker) mirror() (*csaf.AggregatorCSAFProvider, error) { if err != nil && w.dir != "" { // If something goes wrong remove the debris. if err := os.RemoveAll(w.dir); err != nil { - log.Printf("error: %v\n", err) + w.log.Error("Could not remove directory", "path", w.dir, "err", err) } } return result, err @@ -166,7 +166,7 @@ func (w *worker) writeProviderMetadata() error { {Expr: `$.public_openpgp_keys`, Action: util.ReMarshalMatcher(&pm.PGPKeys)}, }, w.metadataProvider); err != nil { // only log the errors - log.Printf("extracting data from orignal provider failed: %v\n", err) + w.log.Error("Extracting data from original provider failed", "err", err) } // We are mirroring the remote public keys, too. @@ -196,11 +196,11 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { for i := range pm.PGPKeys { pgpKey := &pm.PGPKeys[i] if pgpKey.URL == nil { - log.Printf("ignoring PGP key without URL: %s\n", pgpKey.Fingerprint) + w.log.Warn("Ignoring PGP key without URL", "fingerprint", pgpKey.Fingerprint) continue } if _, err := hex.DecodeString(string(pgpKey.Fingerprint)); err != nil { - log.Printf("ignoring PGP with invalid fingerprint: %s\n", *pgpKey.URL) + w.log.Warn("Ignoring PGP key with invalid fingerprint", "url", *pgpKey.URL) continue } @@ -344,7 +344,7 @@ func (w *worker) doMirrorTransaction() error { // Check if there is a sysmlink already. 
target := filepath.Join(w.processor.cfg.Folder, w.provider.Name) - log.Printf("target: '%s'\n", target) + w.log.Debug("Checking for path existance", "path", target) exists, err := util.PathExists(target) if err != nil { @@ -359,7 +359,7 @@ func (w *worker) doMirrorTransaction() error { } } - log.Printf("sym link: %s -> %s\n", w.dir, target) + w.log.Debug("Creating symbol", "from", w.dir, "to", target) // Create a new symlink if err := os.Symlink(w.dir, target); err != nil { @@ -368,7 +368,7 @@ func (w *worker) doMirrorTransaction() error { } // Move the symlink - log.Printf("Move: %s -> %s\n", target, webTarget) + w.log.Debug("Moving symbol", "from", target, "to", webTarget) if err := os.Rename(target, webTarget); err != nil { os.RemoveAll(w.dir) return err @@ -499,14 +499,14 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) u, err := url.Parse(file.URL()) if err != nil { - log.Printf("error: %s\n", err) + w.log.Error("Could not parse advisory file URL", "err", err) continue } // Should we ignore this advisory? if w.provider.ignoreURL(file.URL(), w.processor.cfg) { if w.processor.cfg.Verbose { - log.Printf("Ignoring %s: %q\n", w.provider.Name, file.URL()) + w.log.Info("Ignoring advisory", slog.Group("provider", "name", w.provider.Name), "file", file) } continue } @@ -514,7 +514,7 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) // Ignore not conforming filenames. filename := filepath.Base(u.Path) if !util.ConformingFileName(filename) { - log.Printf("Not conforming filename %q. Ignoring.\n", filename) + w.log.Warn("Ignoring advisory because of non-conforming filename", "filename", filename) continue } @@ -531,19 +531,18 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) } if err := downloadJSON(w.client, file.URL(), download); err != nil { - log.Printf("error: %v\n", err) + w.log.Error("Error while downloading JSON", "err", err) continue } // Check against CSAF schema. 
errors, err := csaf.ValidateCSAF(advisory) if err != nil { - log.Printf("error: %s: %v", file, err) + w.log.Error("Error while validating CSAF schema", "err", err) continue } if len(errors) > 0 { - log.Printf("CSAF file %s has %d validation errors.\n", - file, len(errors)) + w.log.Error("CSAF file has validation errors", "num.errors", len(errors), "file", file) continue } @@ -551,29 +550,27 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) if rmv := w.processor.remoteValidator; rmv != nil { rvr, err := rmv.Validate(advisory) if err != nil { - log.Printf("Calling remote validator failed: %s\n", err) + w.log.Error("Calling remote validator failed", "err", err) continue } if !rvr.Valid { - log.Printf( - "CSAF file %s does not validate remotely.\n", file) + w.log.Error("CSAF file does not validate remotely", "file", file.URL()) continue } } sum, err := csaf.NewAdvisorySummary(w.expr, advisory) if err != nil { - log.Printf("error: %s: %v\n", file, err) + w.log.Error("Error while creating new advisory", "file", file, "err", err) continue } if util.CleanFileName(sum.ID) != filename { - log.Printf("ID %q does not match filename %s", - sum.ID, filename) + w.log.Error("ID mismatch", "id", sum.ID, "filename", filename) } if err := w.extractCategories(label, advisory); err != nil { - log.Printf("error: %s: %v\n", file, err) + w.log.Error("Could not extract categories", "file", file, "err", err) continue } @@ -624,7 +621,7 @@ func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error { if err != nil { if err != errNotFound { - log.Printf("error: %s: %v\n", url, err) + w.log.Error("Could not find signature URL", "url", url, "err", err) } // Sign it our self. 
if sig, err = w.sign(data); err != nil { diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index ccd5062..9a71b90 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -10,14 +10,14 @@ package main import ( "fmt" - "log" "os" "path/filepath" - "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/csaf-poc/csaf_distribution/v3/util" + + "github.com/ProtonMail/gopenpgp/v2/crypto" + "golang.org/x/exp/slog" ) type processor struct { @@ -26,6 +26,9 @@ type processor struct { // remoteValidator is a globally configured remote validator. remoteValidator csaf.RemoteValidator + + // log is the structured logger for the whole processor. + log *slog.Logger } type summary struct { @@ -48,6 +51,7 @@ type worker struct { dir string // Directory to store data to. summaries map[string][]summary // the summaries of the advisories. categories map[string]util.Set[string] // the categories per label. + log *slog.Logger // the structured logger, supplied with the worker number. 
} func newWorker(num int, processor *processor) *worker { @@ -55,6 +59,7 @@ func newWorker(num int, processor *processor) *worker { num: num, processor: processor, expr: util.NewPathEval(), + log: processor.log.With(slog.Int("worker", num)), } } @@ -86,9 +91,10 @@ func (w *worker) locateProviderMetadata(domain string) error { if w.processor.cfg.Verbose { for i := range lpmd.Messages { - log.Printf( - "Loading provider-metadata.json of %q: %s\n", - domain, lpmd.Messages[i].Message) + w.log.Info( + "Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) } } @@ -141,7 +147,7 @@ func (p *processor) removeOrphans() error { fi, err := entry.Info() if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not retrieve file info", "err", err) continue } @@ -153,13 +159,13 @@ func (p *processor) removeOrphans() error { d := filepath.Join(path, entry.Name()) r, err := filepath.EvalSymlinks(d) if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not evaluate symlink", "err", err) continue } fd, err := os.Stat(r) if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not retrieve file stats", "err", err) continue } @@ -169,18 +175,18 @@ func (p *processor) removeOrphans() error { } // Remove the link. - log.Printf("removing link %s -> %s\n", d, r) + p.log.Info("Removing link", "path", fmt.Sprintf("%s -> %s", d, r)) if err := os.Remove(d); err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not remove symlink", "err", err) continue } // Only remove directories which are in our folder. 
if rel, err := filepath.Rel(prefix, r); err == nil && rel == filepath.Base(r) { - log.Printf("removing directory %s\n", r) + p.log.Info("Remove directory", "path", r) if err := os.RemoveAll(r); err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not remove directory", "err", err) } } } diff --git a/csaf/advisories.go b/csaf/advisories.go index 5b85690..abd55c6 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -13,6 +13,7 @@ import ( "fmt" "io" "log" + "log/slog" "net/http" "net/url" "strings" @@ -23,6 +24,7 @@ import ( // AdvisoryFile constructs the urls of a remote file. type AdvisoryFile interface { + slog.LogValuer URL() string SHA256URL() string SHA512URL() string @@ -46,6 +48,11 @@ func (paf PlainAdvisoryFile) SHA512URL() string { return string(paf) + ".sha512" // SignURL returns the URL of signature file of this advisory. func (paf PlainAdvisoryFile) SignURL() string { return string(paf) + ".asc" } +// LogValue implements [slog.LogValuer] +func (paf PlainAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", paf.URL())) +} + // HashedAdvisoryFile is a more involed version of checkFile. // Here each component can be given explicitly. // If a component is not given it is constructed by @@ -71,6 +78,11 @@ func (haf HashedAdvisoryFile) SHA512URL() string { return haf.name(2, ".sha512") // SignURL returns the URL of signature file of this advisory. func (haf HashedAdvisoryFile) SignURL() string { return haf.name(3, ".asc") } +// LogValue implements [slog.LogValuer] +func (haf HashedAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", haf.URL())) +} + // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. 
type AdvisoryFileProcessor struct { diff --git a/go.mod b/go.mod index 469c8a3..1f6f51d 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 go.etcd.io/bbolt v1.3.8 golang.org/x/crypto v0.14.0 - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa + golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f golang.org/x/term v0.13.0 golang.org/x/time v0.3.0 ) diff --git a/go.sum b/go.sum index 3a101d4..cbbb382 100644 --- a/go.sum +++ b/go.sum @@ -53,6 +53,8 @@ golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= diff --git a/internal/options/options.go b/internal/options/options.go index 961b4b4..ffd699b 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -19,6 +19,7 @@ import ( "github.com/mitchellh/go-homedir" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) // Parser helps parsing command line arguments and loading @@ -147,3 +148,12 @@ func ErrorCheck(err error) { log.Fatalf("error: %v\n", err) } } + +// ErrorCheck checks if err is not nil and terminates +// the program if so. 
+func ErrorCheckStructured(err error) { + if err != nil { + slog.Error("Error while executing program", "err", err) + os.Exit(1) + } +} From fb1cf32e17f2dd007efc979c8cbb3fc80786f2e6 Mon Sep 17 00:00:00 2001 From: Christian Banse Date: Fri, 19 Apr 2024 09:35:36 +0200 Subject: [PATCH 009/117] Fixed linting errors --- cmd/csaf_aggregator/config.go | 2 +- internal/options/options.go | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 2a2bef2..f1e602d 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -396,7 +396,7 @@ func (c *config) setDefaults() { } // prepareLogging sets up the structured logging. -func (cfg *config) prepareLogging() error { +func (c *config) prepareLogging() error { ho := slog.HandlerOptions{ Level: slog.LevelDebug, } diff --git a/internal/options/options.go b/internal/options/options.go index ffd699b..d8574ff 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -149,8 +149,9 @@ func ErrorCheck(err error) { } } -// ErrorCheck checks if err is not nil and terminates -// the program if so. +// ErrorCheckStructured checks if err is not nil and terminates the program if +// so. This is similar to [ErrorCheck], but uses [slog] instead of the +// non-structured Go logging. 
func ErrorCheckStructured(err error) { if err != nil { slog.Error("Error while executing program", "err", err) From 39a29e39f1272bee8794413b1372cf3a592fc3c6 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Mon, 22 Apr 2024 13:11:30 +0200 Subject: [PATCH 010/117] Change Licenses from MIT to Apache 2.0 --- LICENSES/Apache-2.0.txt | 73 +++++++++++++++++++++ LICENSES/MIT.txt | 21 ------ Makefile | 6 +- README.md | 12 +++- cmd/csaf_aggregator/client.go | 6 +- cmd/csaf_aggregator/config.go | 6 +- cmd/csaf_aggregator/files.go | 6 +- cmd/csaf_aggregator/full.go | 6 +- cmd/csaf_aggregator/indices.go | 6 +- cmd/csaf_aggregator/interim.go | 6 +- cmd/csaf_aggregator/lazytransaction.go | 6 +- cmd/csaf_aggregator/lister.go | 6 +- cmd/csaf_aggregator/main.go | 6 +- cmd/csaf_aggregator/mirror.go | 6 +- cmd/csaf_aggregator/processor.go | 6 +- cmd/csaf_checker/config.go | 6 +- cmd/csaf_checker/links.go | 6 +- cmd/csaf_checker/links_test.go | 6 +- cmd/csaf_checker/main.go | 6 +- cmd/csaf_checker/processor.go | 6 +- cmd/csaf_checker/report.go | 6 +- cmd/csaf_checker/reporters.go | 6 +- cmd/csaf_checker/roliecheck.go | 6 +- cmd/csaf_checker/rules.go | 6 +- cmd/csaf_downloader/config.go | 6 +- cmd/csaf_downloader/downloader.go | 6 +- cmd/csaf_downloader/forwarder.go | 6 +- cmd/csaf_downloader/forwarder_test.go | 6 +- cmd/csaf_downloader/main.go | 6 +- cmd/csaf_downloader/stats.go | 6 +- cmd/csaf_downloader/stats_test.go | 6 +- cmd/csaf_provider/actions.go | 6 +- cmd/csaf_provider/config.go | 6 +- cmd/csaf_provider/controller.go | 6 +- cmd/csaf_provider/create.go | 6 +- cmd/csaf_provider/files.go | 6 +- cmd/csaf_provider/indices.go | 6 +- cmd/csaf_provider/main.go | 6 +- cmd/csaf_provider/mux.go | 6 +- cmd/csaf_provider/rolie.go | 6 +- cmd/csaf_provider/tmpl/create.html | 6 +- cmd/csaf_provider/tmpl/index.html | 6 +- cmd/csaf_provider/tmpl/upload.html | 6 +- cmd/csaf_provider/transaction.go | 6 +- cmd/csaf_uploader/config.go | 6 +- cmd/csaf_uploader/main.go | 6 +- 
cmd/csaf_uploader/processor.go | 6 +- cmd/csaf_validator/main.go | 6 +- csaf/advisories.go | 6 +- csaf/advisory.go | 6 +- csaf/cvss20enums.go | 6 +- csaf/cvss3enums.go | 6 +- csaf/doc.go | 6 +- csaf/generate_cvss_enums.go | 12 ++-- csaf/models.go | 6 +- csaf/providermetaloader.go | 6 +- csaf/remotevalidation.go | 6 +- csaf/rolie.go | 6 +- csaf/summary.go | 6 +- csaf/util.go | 6 +- csaf/util_test.go | 6 +- csaf/validation.go | 6 +- docs/scripts/DNSConfigForItest.sh | 6 +- docs/scripts/TLSClientConfigsForITest.sh | 6 +- docs/scripts/TLSConfigsForITest.sh | 6 +- docs/scripts/createCCForITest.sh | 6 +- docs/scripts/createRootCAForITest.sh | 6 +- docs/scripts/createWebserverCertForITest.sh | 6 +- docs/scripts/downloadExamples.sh | 6 +- docs/scripts/setupProviderForITest.sh | 6 +- docs/scripts/setupValidationService.sh | 6 +- docs/scripts/testAggregator.sh | 6 +- docs/scripts/testChecker.sh | 6 +- docs/scripts/testDownloader.sh | 6 +- docs/scripts/uploadToProvider.sh | 6 +- internal/certs/certs.go | 6 +- internal/certs/certs_test.go | 6 +- internal/filter/filter.go | 6 +- internal/filter/filter_test.go | 6 +- internal/misc/doc.go | 6 +- internal/misc/mime.go | 6 +- internal/misc/mime_test.go | 6 +- internal/models/models.go | 6 +- internal/models/models_test.go | 6 +- internal/options/log.go | 6 +- internal/options/log_test.go | 6 +- internal/options/options.go | 6 +- internal/options/options_test.go | 6 +- util/client.go | 6 +- util/csv.go | 6 +- util/doc.go | 6 +- util/file.go | 6 +- util/file_test.go | 6 +- util/hash.go | 6 +- util/json.go | 6 +- util/set.go | 6 +- util/url.go | 6 +- util/version.go | 6 +- 98 files changed, 372 insertions(+), 310 deletions(-) create mode 100644 LICENSES/Apache-2.0.txt delete mode 100644 LICENSES/MIT.txt diff --git a/LICENSES/Apache-2.0.txt b/LICENSES/Apache-2.0.txt new file mode 100644 index 0000000..137069b --- /dev/null +++ b/LICENSES/Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 
+http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed 
as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSES/MIT.txt b/LICENSES/MIT.txt deleted file mode 100644 index 57165e6..0000000 --- a/LICENSES/MIT.txt +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/Makefile b/Makefile index 19e31c7..b4b3964 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ -# This file is Free Software under the MIT License -# without warranty, see README.md and LICENSES/MIT.txt for details. +# This file is Free Software under the Apache-2.0 License +# without warranty, see README.md and LICENSES/Apache-2.0.txt for details. # -# SPDX-License-Identifier: MIT +# SPDX-License-Identifier: Apache-2.0 # # SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) # Software-Engineering: 2021 Intevation GmbH diff --git a/README.md b/README.md index 69601cd..78342f5 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,13 @@ + + # csaf_distribution An implementation of a [CSAF](https://csaf.io/) @@ -90,7 +100,7 @@ For further details of the development process consult our [development page](./ ## License -- `csaf_distribution` is licensed as Free Software under MIT License. +- `csaf_distribution` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). - See the specific source files for details, the license itself can be found in the directory `LICENSES/`. diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index deb108a..8200d34 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index edb1fd9..711238c 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/files.go b/cmd/csaf_aggregator/files.go index adf04aa..18ccbb6 100644 --- a/cmd/csaf_aggregator/files.go +++ b/cmd/csaf_aggregator/files.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 600c650..fb8e0f9 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 69954bd..598685c 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index bdd5ebc..692841f 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index a2b1e94..16470d3 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/lister.go b/cmd/csaf_aggregator/lister.go index a3bfd29..4d758e4 100644 --- a/cmd/csaf_aggregator/lister.go +++ b/cmd/csaf_aggregator/lister.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 74a9670..d5d04e5 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 3acb48e..32e0cbf 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index ccd5062..fb9acde 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index 3502443..ac9ce62 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index 5784489..0456ace 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index 3229511..8abf4e6 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/main.go b/cmd/csaf_checker/main.go index 73a5cce..752fdf8 100644 --- a/cmd/csaf_checker/main.go +++ b/cmd/csaf_checker/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 8f3a6c1..451a315 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_checker/report.go b/cmd/csaf_checker/report.go index 2b53bb2..9b5251b 100644 --- a/cmd/csaf_checker/report.go +++ b/cmd/csaf_checker/report.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index c707a14..016d371 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 94b1c2f..53d1150 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_checker/rules.go b/cmd/csaf_checker/rules.go index 6981b6b..eadbbb2 100644 --- a/cmd/csaf_checker/rules.go +++ b/cmd/csaf_checker/rules.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 367780f..39a4d05 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 7fa0c7c..38203bf 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022, 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2022, 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index eda6595..13957d5 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index dc515ad..edfa476 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index daff163..9364b88 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_downloader/stats.go b/cmd/csaf_downloader/stats.go index 237420a..94a38de 100644 --- a/cmd/csaf_downloader/stats.go +++ b/cmd/csaf_downloader/stats.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/stats_test.go b/cmd/csaf_downloader/stats_test.go index b3ab914..79406c7 100644 --- a/cmd/csaf_downloader/stats_test.go +++ b/cmd/csaf_downloader/stats_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_provider/actions.go b/cmd/csaf_provider/actions.go index 54d4e24..8f385e6 100644 --- a/cmd/csaf_provider/actions.go +++ b/cmd/csaf_provider/actions.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index af99cc1..49a7204 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/controller.go b/cmd/csaf_provider/controller.go index c8680ff..7f64fe2 100644 --- a/cmd/csaf_provider/controller.go +++ b/cmd/csaf_provider/controller.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/create.go b/cmd/csaf_provider/create.go index 8e882a5..56893c6 100644 --- a/cmd/csaf_provider/create.go +++ b/cmd/csaf_provider/create.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/files.go b/cmd/csaf_provider/files.go index 0b3c5ed..39a97e3 100644 --- a/cmd/csaf_provider/files.go +++ b/cmd/csaf_provider/files.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/indices.go b/cmd/csaf_provider/indices.go index a7ecd3b..805371b 100644 --- a/cmd/csaf_provider/indices.go +++ b/cmd/csaf_provider/indices.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 2264676..8740e81 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/mux.go b/cmd/csaf_provider/mux.go index 34b7e2e..021c074 100644 --- a/cmd/csaf_provider/mux.go +++ b/cmd/csaf_provider/mux.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/rolie.go b/cmd/csaf_provider/rolie.go index ea48480..98448bd 100644 --- a/cmd/csaf_provider/rolie.go +++ b/cmd/csaf_provider/rolie.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/tmpl/create.html b/cmd/csaf_provider/tmpl/create.html index 74fef6d..0b06f6f 100644 --- a/cmd/csaf_provider/tmpl/create.html +++ b/cmd/csaf_provider/tmpl/create.html @@ -1,8 +1,8 @@ diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index b02165b..81a45fa 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -100,22 +100,12 @@ The following example file documents all available configuration options: #tlps = ["csaf", "white", "amber", "green", "red"] # Make the provider create a ROLIE service document. -#create_service_document = true +#create_service_document = false # Make the provider create a ROLIE category document from a list of strings. # If a list item starts with `expr:` # the rest of the string is used as a JsonPath expression # to extract a string from the incoming advisories. -# If the result of the expression is a string this string -# is used. 
If the result is an array each element of -# this array is tested if it is a string or an array. -# If this test fails the expression fails. If the -# test succeeds the rules are applied recursively to -# collect all strings in the result. -# Suggested expressions are: -# - vendor, product family and product names: "expr:$.product_tree..branches[?(@.category==\"vendor\" || @.category==\"product_family\" || @.category==\"product_name\")].name" -# - CVEs: "expr:$.vulnerabilities[*].cve" -# - CWEs: "expr:$.vulnerabilities[*].cwe.id" # Strings not starting with `expr:` are taken verbatim. # By default no category documents are created. # This example provides an overview over the syntax, From 2f9d5658eb8c34dd782d95b9cd030e348163d30d Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 13 May 2024 11:50:06 +0200 Subject: [PATCH 022/117] docs: remove unused license file (#544) * Remove LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt as the only code using it was already removed with 6b9ecead89c5b40e86928c6e7f416903e0a495e1. --- LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt | 51 ------------------- 1 file changed, 51 deletions(-) delete mode 100644 LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt diff --git a/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt b/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt deleted file mode 100644 index fa1aad8..0000000 --- a/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt +++ /dev/null @@ -1,51 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. 
- * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. 
If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. From 7a5347803abc06dffbd106b8544e696d81ac3056 Mon Sep 17 00:00:00 2001 From: Florian von Samson <167841080+fvsamson@users.noreply.github.com> Date: Mon, 13 May 2024 14:36:03 +0200 Subject: [PATCH 023/117] docs: improve README.md's first sentence * Improve the structure of the sentence and the two links. --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 4c02b8f..bc9ae2a 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,9 @@ # csaf_distribution -An implementation of a [CSAF](https://csaf.io/) -[2.0 Spec](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) -([Errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) +Implements a [CSAF](https://csaf.io/) +([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) +and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. 
From 33bd6bd78786564f56f458618df611e700eeeea3 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 12 Jun 2024 10:08:06 +0200 Subject: [PATCH 024/117] Extend unittest coverage in util --- util/csv_test.go | 40 +++++++++ util/file_test.go | 141 ++++++++++++++++++++++++++++++- util/hash_test.go | 109 ++++++++++++++++++++++++ util/json_test.go | 209 ++++++++++++++++++++++++++++++++++++++++++++++ util/set_test.go | 65 ++++++++++++++ util/url_test.go | 36 ++++++++ 6 files changed, 599 insertions(+), 1 deletion(-) create mode 100644 util/csv_test.go create mode 100644 util/hash_test.go create mode 100644 util/json_test.go create mode 100644 util/set_test.go create mode 100644 util/url_test.go diff --git a/util/csv_test.go b/util/csv_test.go new file mode 100644 index 0000000..a744b75 --- /dev/null +++ b/util/csv_test.go @@ -0,0 +1,40 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "bytes" + "testing" +) + +func TestCSV(t *testing.T) { + buf := new(bytes.Buffer) + csvWriter := NewFullyQuotedCSWWriter(buf) + for _, x := range [][]string{{"a", "b", "c"}, {"d", "e", "f"}} { + err := csvWriter.Write(x) + if err != nil { + t.Error(err) + } + } + + csvWriter.Flush() + err := csvWriter.Error() + if err != nil { + t.Error(err) + } + for _, want := range []string{`"a","b","c"`, `"d","e","f"`} { + got, err := buf.ReadString('\n') + if err != nil { + t.Error(err) + } + if got[:len(got)-1] != want { + t.Errorf("FullyQuotedCSWWriter: Expected %q but got %q.", want, got) + } + } +} diff --git a/util/file_test.go b/util/file_test.go index 3f648b8..320f3d4 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -10,6 +10,8 @@ package util import ( "bytes" + "os" + "path/filepath" "testing" ) 
@@ -55,8 +57,54 @@ func TestConformingFileName(t *testing.T) { } } -func TestNWriter(t *testing.T) { +func TestIDMatchesFilename(t *testing.T) { + pathEval := NewPathEval() + doc := make(map[string]interface{}) + doc["document"] = map[string]interface{}{ + "tracking": map[string]interface{}{ + "id": "valid.json", + }, + } + + err := IDMatchesFilename(pathEval, doc, "valid.json") + if err != nil { + t.Errorf("IDMatchesFilename: Expected nil, got %q", err) + } + + err = IDMatchesFilename(pathEval, doc, "different_file_name.json") + if err == nil { + t.Error("IDMatchesFilename: Expected error, got nil") + } + + doc["document"] = map[string]interface{}{ + "tracking": map[string]interface{}{}, + } + err = IDMatchesFilename(pathEval, doc, "valid.json") + if err == nil { + t.Error("IDMatchesFilename: Expected error, got nil") + } +} + +func TestPathExists(t *testing.T) { + got, err := PathExists("/this/path/does/not/exist") + if err != nil { + t.Error(err) + } + if got != false { + t.Error("PathExists: Expected false, got true") + } + dir := t.TempDir() + got, err = PathExists(dir) + if err != nil { + t.Error(err) + } + if got != true { + t.Error("PathExists: Expected true, got false") + } +} + +func TestNWriter(t *testing.T) { msg := []byte("Gruß!\n") first, second := msg[:len(msg)/2], msg[len(msg)/2:] @@ -78,3 +126,94 @@ func TestNWriter(t *testing.T) { t.Errorf("Expected %q, but got %q", msg, out) } } + +func TestWriteToFile(t *testing.T) { + filename := filepath.Join(t.TempDir(), "test_file") + wt := bytes.NewBufferString("test_data") + err := WriteToFile(filename, wt) + if err != nil { + t.Error(err) + } + fileData, err := os.ReadFile(filename) + if err != nil { + t.Error(err) + } + if !bytes.Equal(fileData, []byte("test_data")) { + t.Errorf("DeepCopy: Expected test_data, got %v", fileData) + } +} + +func TestMakeUniqFile(t *testing.T) { + dir := t.TempDir() + _, file, err := MakeUniqFile(dir) + if err != nil { + t.Error(err) + } + _, err = 
file.Write([]byte("test_data")) + if err != nil { + t.Error(err) + } + err = file.Close() + if err != nil { + t.Error(err) + } +} + +func Test_mkUniq(t *testing.T) { + dir := t.TempDir() + name, err := mkUniq(dir+"/", func(name string) error { + return nil + }) + if err != nil { + t.Error(err) + } + firstTime := true + name1, err := mkUniq(dir+"/", func(_ string) error { + if firstTime { + firstTime = false + return os.ErrExist + } + return nil + }) + if err != nil { + t.Error(err) + } + if name == name1 { + t.Errorf("mkUniq: Expected unique names, got %v and %v", name, name1) + } +} + +func TestDeepCopy(t *testing.T) { + dir := t.TempDir() + os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755) + os.MkdirAll(filepath.Join(dir, "dst"), 0755) + os.MkdirAll(filepath.Join(dir, "dst1"), 0755) + err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755) + if err != nil { + t.Error(err) + } + + err = DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")) + if err != nil { + t.Error(err) + } + + fileData, err := os.ReadFile(filepath.Join(dir, "dst/folder0/test_file")) + if err != nil { + t.Error(err) + } + + if !bytes.Equal(fileData, []byte("test_data")) { + t.Errorf("DeepCopy: Expected test_data, got %v", fileData) + } + + err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")) + if err == nil { + t.Error("DeepCopy: Expected error, got nil") + } + + err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist") + if err == nil { + t.Error("DeepCopy: Expected error, got nil") + } +} diff --git a/util/hash_test.go b/util/hash_test.go new file mode 100644 index 0000000..ed0f0b2 --- /dev/null +++ b/util/hash_test.go @@ -0,0 +1,109 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "hash" + "os" + "path/filepath" + "reflect" + "strings" + "testing" +) + +func TestHashFromReader(t *testing.T) { + r := strings.NewReader("deadbeef") + want := []byte{0xde, 0xad, 0xbe, 0xef} + if got, err := HashFromReader(r); !reflect.DeepEqual(want, got) { + if err != nil { + t.Error(err) + } + t.Errorf("HashFromReader: Expected %v, got %v", want, got) + } +} + +func TestHashFromFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + testFile, err := os.Create(filePath) + if err != nil { + t.Error(err) + } + + testFile.WriteString("deadbeef") + want := []byte{0xde, 0xad, 0xbe, 0xef} + + testFile.Close() + + if got, err := HashFromFile(filePath); !reflect.DeepEqual(want, got) { + if err != nil { + t.Error(err) + } + t.Errorf("HashFromFile: Expected %v, got %v", want, got) + } +} + +type deadbeefHash struct { + hash.Hash +} + +func (deadbeefHash) Write(p []byte) (int, error) { return len(p), nil } +func (deadbeefHash) Sum(_ []byte) []byte { return []byte{0xde, 0xad, 0xbe, 0xef} } + +func TestWriteHashToFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + + hashArg := deadbeefHash{} + nameArg := "name" + want := "deadbeef " + nameArg + "\n" + + err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}) + if err != nil { + t.Error(err) + } + testFile, err := os.Open(filePath) + if err != nil { + t.Error(err) + } + defer testFile.Close() + fileContent, err := os.ReadFile(filePath) + if err != nil { + t.Error(err) + } + if got := string(fileContent); got != want { + t.Errorf("WriteHashToFile: Expected %v, got %v", want, got) + } +} + +func TestWriteHashSumToFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + + sum := []byte{0xde, 0xad, 0xbe, 0xef} + 
nameArg := "name" + want := "deadbeef " + nameArg + "\n" + + err := WriteHashSumToFile(filePath, nameArg, sum) + if err != nil { + t.Error(err) + } + testFile, err := os.Open(filePath) + if err != nil { + t.Error(err) + } + defer testFile.Close() + fileContent, err := os.ReadFile(filePath) + if err != nil { + t.Error(err) + } + if got := string(fileContent); got != want { + t.Errorf("WriteHashSumToFile: Expected %v, got %v", want, got) + } +} diff --git a/util/json_test.go b/util/json_test.go new file mode 100644 index 0000000..452fabe --- /dev/null +++ b/util/json_test.go @@ -0,0 +1,209 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "context" + "reflect" + "testing" + "time" +) + +func TestPathEval_Compile(t *testing.T) { + pathEval := NewPathEval() + eval, err := pathEval.Compile("foo") + if err != nil { + t.Error(err) + } + + // Check caching + eval1, err := pathEval.Compile("foo") + if err != nil { + t.Error(err) + } + if reflect.ValueOf(eval).Pointer() != reflect.ValueOf(eval1).Pointer() { + t.Error("PathEval_Compile: Expected cached eval") + } + + got, err := eval.EvalInt(context.Background(), map[string]interface{}{"foo": 5}) + if err != nil { + t.Error(err) + } + if got != 5 { + t.Errorf("PathEval_Compile: Expected 5, got %v", got) + } +} + +func TestPathEval_Eval(t *testing.T) { + pathEval := NewPathEval() + _, err := pathEval.Eval("foo", nil) + if err == nil { + t.Error("PathEval_Eval: Expected error, got nil") + } + got, err := pathEval.Eval("foo", map[string]interface{}{"foo": 5}) + if err != nil { + t.Error(err) + } + if got != 5 { + t.Errorf("PathEval_Compile: Expected 5, got %v", got) + } +} + +func TestReMarshalMatcher(t *testing.T) { + var intDst int 
+ var uintSrc uint = 2 + remarshalFunc := ReMarshalMatcher(&intDst) + err := remarshalFunc(uintSrc) + if err != nil { + t.Error(err) + } + if intDst != 2 { + t.Errorf("ReMarshalMatcher: Expected %v, got %v", uintSrc, intDst) + } +} + +func TestBoolMatcher(t *testing.T) { + var boolDst bool + boolFunc := BoolMatcher(&boolDst) + err := boolFunc(true) + if err != nil { + t.Error(err) + } + + if boolDst != true { + t.Error("BoolMatcher: Expected true got false") + } + + err = boolFunc(1) + if err == nil { + t.Error("BoolMatcher: Expected error, got nil") + } +} + +func TestStringMatcher(t *testing.T) { + var stringDst string + stringFunc := StringMatcher(&stringDst) + err := stringFunc("test") + if err != nil { + t.Error(err) + } + + if stringDst != "test" { + t.Errorf("StringMatcher: Expected test, got %v", stringDst) + } + + err = stringFunc(1) + if err == nil { + t.Error("StringMatcher: Expected error, got nil") + } +} + +func TestStringTreeMatcher(t *testing.T) { + var stringTreeDst []string + stringTreeFunc := StringTreeMatcher(&stringTreeDst) + err := stringTreeFunc([]any{"a", "a", "b"}) + if err != nil { + t.Error(err) + } + + wantAnySlice := []any{"a", "b"} + if reflect.DeepEqual(stringTreeDst, wantAnySlice) { + t.Errorf("StringTreeMatcher: Expected %v, got %v", wantAnySlice, stringTreeDst) + } + + err = stringTreeFunc([]string{"a", "a", "b"}) + if err == nil { + t.Error("StringTreeMatcher: Expected error, got nil") + } + + err = stringTreeFunc(1) + if err == nil { + t.Error("StringTreeMatcher: Expected error, got nil") + } +} + +func TestTimeMatcher(t *testing.T) { + var timeDst time.Time + timeFunc := TimeMatcher(&timeDst, time.RFC3339) + err := timeFunc("2024-03-18T12:57:48.236Z") + if err != nil { + t.Error(err) + } + wantTime := time.Date(2024, time.March, 18, 12, 57, 48, 236_000_000, time.UTC) + if timeDst != wantTime { + t.Errorf("TimeMatcher: Expected %v, got %v", wantTime, timeDst) + } + + err = timeFunc("") + if err == nil { + t.Error("TimeMatcher: 
Expected error, got nil") + } + + err = timeFunc(1) + if err == nil { + t.Error("TimeMatcher: Expected error, got nil") + } +} + +func TestPathEval_Extract(t *testing.T) { + pathEval := NewPathEval() + var result string + matcher := StringMatcher(&result) + err := pathEval.Extract("foo", matcher, true, map[string]interface{}{"foo": "bar"}) + if err != nil { + t.Error(err) + } + if result != "bar" { + t.Errorf("PathEval_Extract: Expected bar, got %v", result) + } +} + +func TestPathEval_Match(t *testing.T) { + var got string + doc := map[string]interface{}{"foo": "bar"} + + pe := NewPathEval() + pem := PathEvalMatcher{Expr: "foo", Action: StringMatcher(&got)} + + err := pe.Match([]PathEvalMatcher{pem}, doc) + if err != nil { + t.Error(err) + } + if got != "bar" { + t.Errorf("PathEval_Match: Expected bar, got %v", got) + } +} + +func TestPathEval_Strings(t *testing.T) { + pe := NewPathEval() + doc := map[string]interface{}{"foo": "bar"} + want := []string{"bar"} + + got, err := pe.Strings([]string{"foo"}, true, doc) + if err != nil { + t.Error(err) + } + + if !reflect.DeepEqual(got, want) { + t.Errorf("PathEval_Strings: Expected %v, got %v", want, got) + } +} + +func TestAsStrings(t *testing.T) { + arg := []interface{}{"foo", "bar"} + want := []string{"foo", "bar"} + + got, valid := AsStrings(arg) + if !valid { + t.Error("AsStrings: Expected true, got false") + } + if !reflect.DeepEqual(got, want) { + t.Errorf("AsStrings: Expected %v, got %v", want, got) + } +} diff --git a/util/set_test.go b/util/set_test.go new file mode 100644 index 0000000..a28878e --- /dev/null +++ b/util/set_test.go @@ -0,0 +1,65 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "reflect" + "sort" + "testing" +) + +func TestSet(t *testing.T) { + s := Set[int]{} + if s.Contains(0) { + t.Error("Set.Contains: Expected false got true") + } + s.Add(0) + if !s.Contains(0) { + t.Error("Set.Contains: Expected true got false") + } + + s0 := Set[int]{} + s1 := Set[int]{} + + s0.Add(0) + s0.Add(1) + + s1.Add(0) + s1.Add(1) + s1.Add(2) + + diff0 := s0.Difference(s1) + diff1 := s1.Difference(s0) + + if reflect.DeepEqual(diff0, diff1) { + t.Errorf("Set.Difference: %q and %q are different", diff0, diff1) + } + + if s0.ContainsAll(s1) { + t.Error("Set.ContainsAll: Expected false got true") + } + + if !s1.ContainsAll(s0) { + t.Error("Set.ContainsAll: Expected true got false") + } + + s2 := Set[int]{} + s2.Add(0) + s2.Add(1) + s2.Add(2) + s2.Add(3) + + wantKeys := []int{0, 1, 2, 3} + gotKeys := s2.Keys() + sort.Ints(gotKeys) + + if !reflect.DeepEqual(wantKeys, gotKeys) { + t.Errorf("Set.Keys: Expected %q got %q", wantKeys, gotKeys) + } +} diff --git a/util/url_test.go b/util/url_test.go new file mode 100644 index 0000000..dec73dc --- /dev/null +++ b/util/url_test.go @@ -0,0 +1,36 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "net/url" + "testing" +) + +func TestBaseUrl(t *testing.T) { + for _, x := range [][2]string{ + {`http://example.com`, `http://example.com/`}, + {`scheme://example.com`, `scheme://example.com/`}, + {`https://example.com`, `https://example.com/`}, + {`https://example.com:8080/`, `https://example.com:8080/`}, + {`https://user@example.com:8080/`, `https://user@example.com:8080/`}, + {`https://user@example.com:8080/resource`, `https://user@example.com:8080/`}, + {`https://user@example.com:8080/resource/`, `https://user@example.com:8080/resource/`}, + {`https://user@example.com:8080/resource/#fragment`, `https://user@example.com:8080/resource/`}, + {`https://user@example.com:8080/resource/?query=test#fragment`, `https://user@example.com:8080/resource/`}, + } { + url, _ := url.Parse(x[0]) + if got, err := BaseURL(url); got != x[1] { + if err != nil { + t.Error(err) + } + t.Errorf("%q: Expected %q but got %q.", x[0], x[1], got) + } + } +} From e2ad3d3f8302a81be9fe4d20153aac2f0dc041bd Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 21 Jun 2024 14:02:51 +0200 Subject: [PATCH 025/117] docs: fix licensing info for generated files (#542) * docs: fix licensing info for generated files * change generate_cvss_enums.go to note that the input file is relevant for the license. * change license and copyright of cvss20enums.go and cvss3enums.go to BSD-3-Clause and FIRST. * add reuse.software 3.0 compatible files for the schema cvss files. * Stamp right license into generated files. --------- Co-authored-by: Sascha L. 
Teichmann --- LICENSES/BSD-3-Clause.txt | 11 +++++++++++ csaf/cvss20enums.go | 9 ++------- csaf/cvss3enums.go | 9 ++------- csaf/generate_cvss_enums.go | 28 +++++++++++++++++++++------- csaf/schema/cvss-v2.0.json.license | 2 ++ csaf/schema/cvss-v3.0.json.license | 2 ++ csaf/schema/cvss-v3.1.json.license | 2 ++ 7 files changed, 42 insertions(+), 21 deletions(-) create mode 100644 LICENSES/BSD-3-Clause.txt create mode 100644 csaf/schema/cvss-v2.0.json.license create mode 100644 csaf/schema/cvss-v3.0.json.license create mode 100644 csaf/schema/cvss-v3.1.json.license diff --git a/LICENSES/BSD-3-Clause.txt b/LICENSES/BSD-3-Clause.txt new file mode 100644 index 0000000..ea890af --- /dev/null +++ b/LICENSES/BSD-3-Clause.txt @@ -0,0 +1,11 @@ +Copyright (c) . + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/csaf/cvss20enums.go b/csaf/cvss20enums.go index 7056f3e..97d2e10 100644 --- a/csaf/cvss20enums.go +++ b/csaf/cvss20enums.go @@ -1,10 +1,5 @@ -// This file is Free Software under the Apache-2.0 License -// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. -// -// SPDX-License-Identifier: Apache-2.0 -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-License-Identifier: BSD-3-Clause +// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! diff --git a/csaf/cvss3enums.go b/csaf/cvss3enums.go index b8cf54f..32e01e3 100644 --- a/csaf/cvss3enums.go +++ b/csaf/cvss3enums.go @@ -1,10 +1,5 @@ -// This file is Free Software under the Apache-2.0 License -// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. -// -// SPDX-License-Identifier: Apache-2.0 -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-License-Identifier: BSD-3-Clause +// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! 
diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index 7c9b9fd..c84ab15 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -14,21 +14,21 @@ import ( "bytes" "encoding/json" "flag" + "fmt" "go/format" "log" "os" + "regexp" "sort" "strings" "text/template" ) -const tmplText = `// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. -// -// SPDX-License-Identifier: MIT -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// We from Intevation consider the source code parts in the following +// template file as too insignificant to be a piece of work that gains +// "copyrights" protection in the European Union. So the license(s) +// of the output files are fully determined by the input file. +const tmplText = `// {{ $.License }} // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! @@ -69,6 +69,7 @@ type definition struct { } type schema struct { + License []string `json:"license"` Definitions map[string]*definition `json:"definitions"` } @@ -137,9 +138,22 @@ func main() { } sort.Strings(defs) + license := "determine license(s) from input file and replace this line" + + pattern := regexp.MustCompile(`Copyright \(c\) (\d+), FIRST.ORG, INC.`) + for _, line := range s.License { + if m := pattern.FindStringSubmatch(line); m != nil { + license = fmt.Sprintf( + "SPDX-License-Identifier: BSD-3-Clause\n"+ + "// SPDX-FileCopyrightText: %s FIRST.ORG, INC.", m[1]) + break + } + } + var source bytes.Buffer check(tmpl.Execute(&source, map[string]any{ + "License": license, "Prefix": *prefix, "Definitions": s.Definitions, "Keys": defs, diff --git a/csaf/schema/cvss-v2.0.json.license b/csaf/schema/cvss-v2.0.json.license new file mode 100644 index 0000000..dd033e8 --- /dev/null +++ b/csaf/schema/cvss-v2.0.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause 
+SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. diff --git a/csaf/schema/cvss-v3.0.json.license b/csaf/schema/cvss-v3.0.json.license new file mode 100644 index 0000000..dd033e8 --- /dev/null +++ b/csaf/schema/cvss-v3.0.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. diff --git a/csaf/schema/cvss-v3.1.json.license b/csaf/schema/cvss-v3.1.json.license new file mode 100644 index 0000000..f87ced8 --- /dev/null +++ b/csaf/schema/cvss-v3.1.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2021 FIRST.ORG, INC. From 56fadc3a80f66d0006203e9983138c5171b07fbf Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 21 Jun 2024 14:04:20 +0200 Subject: [PATCH 026/117] docs: fix typo in examples/aggregator.toml (#539) --- docs/examples/aggregator.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index ae1723d..2161079 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -51,7 +51,7 @@ insecure = true # rate = 1.8 # insecure = true write_indices = true - # If aggregator.category == "aggreator", set for an entry that should + # If aggregator.category == "aggregator", set for an entry that should # be listed in addition: category = "lister" # ignore_pattern = [".*white.*", ".*red.*"] From 3084cdbc371f03adfe22c1640b53b43fed5a0563 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Fri, 21 Jun 2024 15:35:30 +0200 Subject: [PATCH 027/117] Address comments --- util/csv_test.go | 6 ++---- util/file_test.go | 54 ++++++++++++++++++++++------------------------- util/hash_test.go | 6 ++---- util/json_test.go | 49 ++++++++++++++++-------------------------- 4 files changed, 47 insertions(+), 68 deletions(-) diff --git a/util/csv_test.go b/util/csv_test.go index a744b75..575d83d 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ 
-17,15 +17,13 @@ func TestCSV(t *testing.T) { buf := new(bytes.Buffer) csvWriter := NewFullyQuotedCSWWriter(buf) for _, x := range [][]string{{"a", "b", "c"}, {"d", "e", "f"}} { - err := csvWriter.Write(x) - if err != nil { + if err := csvWriter.Write(x); err != nil { t.Error(err) } } csvWriter.Flush() - err := csvWriter.Error() - if err != nil { + if err := csvWriter.Error(); err != nil { t.Error(err) } for _, want := range []string{`"a","b","c"`, `"d","e","f"`} { diff --git a/util/file_test.go b/util/file_test.go index 320f3d4..28c5196 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -60,28 +60,25 @@ func TestConformingFileName(t *testing.T) { func TestIDMatchesFilename(t *testing.T) { pathEval := NewPathEval() - doc := make(map[string]interface{}) - doc["document"] = map[string]interface{}{ - "tracking": map[string]interface{}{ + doc := make(map[string]any) + doc["document"] = map[string]any{ + "tracking": map[string]any{ "id": "valid.json", }, } - err := IDMatchesFilename(pathEval, doc, "valid.json") - if err != nil { + if err := IDMatchesFilename(pathEval, doc, "valid.json"); err != nil { t.Errorf("IDMatchesFilename: Expected nil, got %q", err) } - err = IDMatchesFilename(pathEval, doc, "different_file_name.json") - if err == nil { + if err := IDMatchesFilename(pathEval, doc, "different_file_name.json"); err == nil { t.Error("IDMatchesFilename: Expected error, got nil") } - doc["document"] = map[string]interface{}{ - "tracking": map[string]interface{}{}, + doc["document"] = map[string]any{ + "tracking": map[string]any{}, } - err = IDMatchesFilename(pathEval, doc, "valid.json") - if err == nil { + if err := IDMatchesFilename(pathEval, doc, "valid.json"); err == nil { t.Error("IDMatchesFilename: Expected error, got nil") } } @@ -130,8 +127,7 @@ func TestNWriter(t *testing.T) { func TestWriteToFile(t *testing.T) { filename := filepath.Join(t.TempDir(), "test_file") wt := bytes.NewBufferString("test_data") - err := WriteToFile(filename, wt) - if err != 
nil { + if err := WriteToFile(filename, wt); err != nil { t.Error(err) } fileData, err := os.ReadFile(filename) @@ -149,12 +145,10 @@ func TestMakeUniqFile(t *testing.T) { if err != nil { t.Error(err) } - _, err = file.Write([]byte("test_data")) - if err != nil { + if _, err = file.Write([]byte("test_data")); err != nil { t.Error(err) } - err = file.Close() - if err != nil { + if err = file.Close(); err != nil { t.Error(err) } } @@ -185,16 +179,20 @@ func Test_mkUniq(t *testing.T) { func TestDeepCopy(t *testing.T) { dir := t.TempDir() - os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755) - os.MkdirAll(filepath.Join(dir, "dst"), 0755) - os.MkdirAll(filepath.Join(dir, "dst1"), 0755) - err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755) - if err != nil { - t.Error(err) + if err := os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(filepath.Join(dir, "dst"), 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(filepath.Join(dir, "dst1"), 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755); err != nil { + t.Fatal(err) } - err = DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")) - if err != nil { + if err := DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")); err != nil { t.Error(err) } @@ -207,13 +205,11 @@ func TestDeepCopy(t *testing.T) { t.Errorf("DeepCopy: Expected test_data, got %v", fileData) } - err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")) - if err == nil { + if err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")); err == nil { t.Error("DeepCopy: Expected error, got nil") } - err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist") - if err == nil { + if err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist"); err == nil { t.Error("DeepCopy: Expected error, got nil") } } diff --git 
a/util/hash_test.go b/util/hash_test.go index ed0f0b2..d690891 100644 --- a/util/hash_test.go +++ b/util/hash_test.go @@ -64,8 +64,7 @@ func TestWriteHashToFile(t *testing.T) { nameArg := "name" want := "deadbeef " + nameArg + "\n" - err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}) - if err != nil { + if err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}); err != nil { t.Error(err) } testFile, err := os.Open(filePath) @@ -90,8 +89,7 @@ func TestWriteHashSumToFile(t *testing.T) { nameArg := "name" want := "deadbeef " + nameArg + "\n" - err := WriteHashSumToFile(filePath, nameArg, sum) - if err != nil { + if err := WriteHashSumToFile(filePath, nameArg, sum); err != nil { t.Error(err) } testFile, err := os.Open(filePath) diff --git a/util/json_test.go b/util/json_test.go index 452fabe..ba18171 100644 --- a/util/json_test.go +++ b/util/json_test.go @@ -31,7 +31,7 @@ func TestPathEval_Compile(t *testing.T) { t.Error("PathEval_Compile: Expected cached eval") } - got, err := eval.EvalInt(context.Background(), map[string]interface{}{"foo": 5}) + got, err := eval.EvalInt(context.Background(), map[string]any{"foo": 5}) if err != nil { t.Error(err) } @@ -46,7 +46,7 @@ func TestPathEval_Eval(t *testing.T) { if err == nil { t.Error("PathEval_Eval: Expected error, got nil") } - got, err := pathEval.Eval("foo", map[string]interface{}{"foo": 5}) + got, err := pathEval.Eval("foo", map[string]any{"foo": 5}) if err != nil { t.Error(err) } @@ -59,8 +59,7 @@ func TestReMarshalMatcher(t *testing.T) { var intDst int var uintSrc uint = 2 remarshalFunc := ReMarshalMatcher(&intDst) - err := remarshalFunc(uintSrc) - if err != nil { + if err := remarshalFunc(uintSrc); err != nil { t.Error(err) } if intDst != 2 { @@ -71,8 +70,7 @@ func TestReMarshalMatcher(t *testing.T) { func TestBoolMatcher(t *testing.T) { var boolDst bool boolFunc := BoolMatcher(&boolDst) - err := boolFunc(true) - if err != nil { + if err := boolFunc(true); err != nil { t.Error(err) } @@ -80,8 +78,7 @@ 
func TestBoolMatcher(t *testing.T) { t.Error("BoolMatcher: Expected true got false") } - err = boolFunc(1) - if err == nil { + if err := boolFunc(1); err == nil { t.Error("BoolMatcher: Expected error, got nil") } } @@ -89,8 +86,7 @@ func TestBoolMatcher(t *testing.T) { func TestStringMatcher(t *testing.T) { var stringDst string stringFunc := StringMatcher(&stringDst) - err := stringFunc("test") - if err != nil { + if err := stringFunc("test"); err != nil { t.Error(err) } @@ -98,8 +94,7 @@ func TestStringMatcher(t *testing.T) { t.Errorf("StringMatcher: Expected test, got %v", stringDst) } - err = stringFunc(1) - if err == nil { + if err := stringFunc(1); err == nil { t.Error("StringMatcher: Expected error, got nil") } } @@ -107,8 +102,7 @@ func TestStringMatcher(t *testing.T) { func TestStringTreeMatcher(t *testing.T) { var stringTreeDst []string stringTreeFunc := StringTreeMatcher(&stringTreeDst) - err := stringTreeFunc([]any{"a", "a", "b"}) - if err != nil { + if err := stringTreeFunc([]any{"a", "a", "b"}); err != nil { t.Error(err) } @@ -117,13 +111,11 @@ func TestStringTreeMatcher(t *testing.T) { t.Errorf("StringTreeMatcher: Expected %v, got %v", wantAnySlice, stringTreeDst) } - err = stringTreeFunc([]string{"a", "a", "b"}) - if err == nil { + if err := stringTreeFunc([]string{"a", "a", "b"}); err == nil { t.Error("StringTreeMatcher: Expected error, got nil") } - err = stringTreeFunc(1) - if err == nil { + if err := stringTreeFunc(1); err == nil { t.Error("StringTreeMatcher: Expected error, got nil") } } @@ -131,8 +123,7 @@ func TestStringTreeMatcher(t *testing.T) { func TestTimeMatcher(t *testing.T) { var timeDst time.Time timeFunc := TimeMatcher(&timeDst, time.RFC3339) - err := timeFunc("2024-03-18T12:57:48.236Z") - if err != nil { + if err := timeFunc("2024-03-18T12:57:48.236Z"); err != nil { t.Error(err) } wantTime := time.Date(2024, time.March, 18, 12, 57, 48, 236_000_000, time.UTC) @@ -140,13 +131,11 @@ func TestTimeMatcher(t *testing.T) { 
t.Errorf("TimeMatcher: Expected %v, got %v", wantTime, timeDst) } - err = timeFunc("") - if err == nil { + if err := timeFunc(""); err == nil { t.Error("TimeMatcher: Expected error, got nil") } - err = timeFunc(1) - if err == nil { + if err := timeFunc(1); err == nil { t.Error("TimeMatcher: Expected error, got nil") } } @@ -155,8 +144,7 @@ func TestPathEval_Extract(t *testing.T) { pathEval := NewPathEval() var result string matcher := StringMatcher(&result) - err := pathEval.Extract("foo", matcher, true, map[string]interface{}{"foo": "bar"}) - if err != nil { + if err := pathEval.Extract("foo", matcher, true, map[string]any{"foo": "bar"}); err != nil { t.Error(err) } if result != "bar" { @@ -166,13 +154,12 @@ func TestPathEval_Extract(t *testing.T) { func TestPathEval_Match(t *testing.T) { var got string - doc := map[string]interface{}{"foo": "bar"} + doc := map[string]any{"foo": "bar"} pe := NewPathEval() pem := PathEvalMatcher{Expr: "foo", Action: StringMatcher(&got)} - err := pe.Match([]PathEvalMatcher{pem}, doc) - if err != nil { + if err := pe.Match([]PathEvalMatcher{pem}, doc); err != nil { t.Error(err) } if got != "bar" { @@ -182,7 +169,7 @@ func TestPathEval_Match(t *testing.T) { func TestPathEval_Strings(t *testing.T) { pe := NewPathEval() - doc := map[string]interface{}{"foo": "bar"} + doc := map[string]any{"foo": "bar"} want := []string{"bar"} got, err := pe.Strings([]string{"foo"}, true, doc) @@ -196,7 +183,7 @@ func TestPathEval_Strings(t *testing.T) { } func TestAsStrings(t *testing.T) { - arg := []interface{}{"foo", "bar"} + arg := []any{"foo", "bar"} want := []string{"foo", "bar"} got, valid := AsStrings(arg) From 5c6736b178b113f6abc2cad6efd9301d5fbbe18e Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 24 Jun 2024 11:57:38 +0200 Subject: [PATCH 028/117] Remove data races in downloader caused by shared use of json path eval. 
(#547) --- cmd/csaf_downloader/downloader.go | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index c5c3e02..a0cf34e 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -40,7 +40,6 @@ import ( type downloader struct { cfg *config keys *crypto.KeyRing - eval *util.PathEval validator csaf.RemoteValidator forwarder *forwarder mkdirMu sync.Mutex @@ -73,7 +72,6 @@ func newDownloader(cfg *config) (*downloader, error) { return &downloader{ cfg: cfg, - eval: util.NewPathEval(), validator: validator, }, nil } @@ -218,17 +216,20 @@ func (d *downloader) download(ctx context.Context, domain string) error { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } + expr := util.NewPathEval() + if err := d.loadOpenPGPKeys( client, lpmd.Document, base, + expr, ); err != nil { return err } afp := csaf.NewAdvisoryFileProcessor( client, - d.eval, + expr, lpmd.Document, base) @@ -297,9 +298,10 @@ func (d *downloader) loadOpenPGPKeys( client util.Client, doc any, base *url.URL, + expr *util.PathEval, ) error { - src, err := d.eval.Eval("$.public_openpgp_keys", doc) + src, err := expr.Eval("$.public_openpgp_keys", doc) if err != nil { // no keys. return nil @@ -421,6 +423,7 @@ func (d *downloader) downloadWorker( dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339) lower = strings.ToLower(string(label)) stats = stats{} + expr = util.NewPathEval() ) // Add collected stats back to total. @@ -588,7 +591,7 @@ nextAdvisory: // Validate if filename is conforming. 
filenameCheck := func() error { - if err := util.IDMatchesFilename(d.eval, doc, filename); err != nil { + if err := util.IDMatchesFilename(expr, doc, filename); err != nil { stats.filenameFailed++ return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) } @@ -651,7 +654,7 @@ nextAdvisory: continue } - if err := d.eval.Extract( + if err := expr.Extract( `$.document.tracking.initial_release_date`, dateExtract, false, doc, ); err != nil { slog.Warn("Cannot extract initial_release_date from advisory", From a46c286cf482451e8f395d367ef8ad3c705cdfd4 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Fri, 19 Apr 2024 14:04:12 +0200 Subject: [PATCH 029/117] fix: don't drop error messages from loading provider-metadata.json previously in case case of trying last resort dns, all other error messages were dropped --- csaf/providermetaloader.go | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 203f2b3..0c4fc3b 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -173,6 +173,8 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata // We have a candidate. if wellknownResult.Valid() { wellknownGood = wellknownResult + } else { + pmdl.messages.AppendUnique(wellknownResult.Messages) } // Next load the PMDs from security.txt @@ -220,25 +222,28 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } } // Take the good well-known. - wellknownGood.Messages.AppendUnique(pmdl.messages) + wellknownGood.Messages = pmdl.messages return wellknownGood } // Don't have well-known. Take first good from security.txt. ignoreExtras() - secGoods[0].Messages.AppendUnique(pmdl.messages) + secGoods[0].Messages = pmdl.messages return secGoods[0] } // If we have a good well-known take it. 
if wellknownGood != nil { - wellknownGood.Messages.AppendUnique(pmdl.messages) + wellknownGood.Messages = pmdl.messages return wellknownGood } // Last resort: fall back to DNS. dnsURL := "https://csaf.data.security." + domain - return pmdl.loadFromURL(dnsURL) + dnsURLResult := pmdl.loadFromURL(dnsURL) + pmdl.messages.AppendUnique(dnsURLResult.Messages) // keep order of messages consistent (i.e. last occurred message is last element) + dnsURLResult.Messages = pmdl.messages + return dnsURLResult } // loadFromSecurity loads the PMDs mentioned in the security.txt. From 51dc9b5bcb26c74bc3e46f3c9cf0e7d190cc41d1 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Fri, 19 Apr 2024 14:06:56 +0200 Subject: [PATCH 030/117] refactor: deduplicate filtering pmd results from security.txt already done in `loadFromSecurity` --- csaf/providermetaloader.go | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 0c4fc3b..b21ddc6 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -178,20 +178,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } // Next load the PMDs from security.txt - secResults := pmdl.loadFromSecurity(domain) - - // Filter out the results which are valid. - var secGoods []*LoadedProviderMetadata - - for _, result := range secResults { - if len(result.Messages) > 0 { - // If there where validation issues append them - // to the overall report - pmdl.messages.AppendUnique(pmdl.messages) - } else { - secGoods = append(secGoods, result) - } - } + secGoods := pmdl.loadFromSecurity(domain) // Mention extra CSAF entries in security.txt. ignoreExtras := func() { @@ -246,7 +233,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata return dnsURLResult } -// loadFromSecurity loads the PMDs mentioned in the security.txt. +// loadFromSecurity loads the PMDs mentioned in the security.txt. 
Only valid PMDs are returned. func (pmdl *ProviderMetadataLoader) loadFromSecurity(domain string) []*LoadedProviderMetadata { // If .well-known fails try legacy location. From 1e531de82d35ab549fa4b07f828f21a38554c3a5 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Mon, 15 Jul 2024 10:52:13 +0200 Subject: [PATCH 031/117] fix: don't require debug level to print error details on failed loading of provider metadata json --- cmd/csaf_aggregator/processor.go | 14 +++++++++----- cmd/csaf_downloader/downloader.go | 13 ++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index 9f10a77..5cb3628 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -89,17 +89,21 @@ func (w *worker) locateProviderMetadata(domain string) error { lpmd := loader.Load(domain) - if w.processor.cfg.Verbose { + if !lpmd.Valid() { for i := range lpmd.Messages { - w.log.Info( + w.log.Error( "Loading provider-metadata.json", "domain", domain, "message", lpmd.Messages[i].Message) } - } - - if !lpmd.Valid() { return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) + } else if w.processor.cfg.Verbose { + for i := range lpmd.Messages { + w.log.Debug( + "Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) + } } w.metadataProvider = lpmd.Document diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index a0cf34e..e370f55 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -199,7 +199,14 @@ func (d *downloader) download(ctx context.Context, domain string) error { lpmd := loader.Load(domain) - if d.cfg.verbose() { + if !lpmd.Valid() { + for i := range lpmd.Messages { + slog.Error("Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) + } + return fmt.Errorf("no valid provider-metadata.json found for '%s': ", domain) + } 
else if d.cfg.verbose() { for i := range lpmd.Messages { slog.Debug("Loading provider-metadata.json", "domain", domain, @@ -207,10 +214,6 @@ func (d *downloader) download(ctx context.Context, domain string) error { } } - if !lpmd.Valid() { - return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) - } - base, err := url.Parse(lpmd.URL) if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) From bcf4d2f64aa267efe0e4cbf1a844d130fb708d23 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Tue, 16 Jul 2024 12:00:09 +0200 Subject: [PATCH 032/117] fix error message The error message had a trailing `:` which suggest that there are some details which were truncated. However the details are already printed before in the log. --- cmd/csaf_downloader/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index e370f55..badf060 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -205,7 +205,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { "domain", domain, "message", lpmd.Messages[i].Message) } - return fmt.Errorf("no valid provider-metadata.json found for '%s': ", domain) + return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) } else if d.cfg.verbose() { for i := range lpmd.Messages { slog.Debug("Loading provider-metadata.json", From a131b0fb4bc97592d8ac4d80280706359b2a6811 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:36:54 +0200 Subject: [PATCH 033/117] Improve SHA* marking --- cmd/csaf_checker/processor.go | 45 ++++++++++++++--- cmd/csaf_downloader/downloader.go | 34 ++++++++----- csaf/advisories.go | 83 +++++++++++++++---------------- 3 files changed, 99 insertions(+), 63 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..de42e18 100644 --- 
a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -20,6 +20,7 @@ import ( "fmt" "io" "log" + "log/slog" "net/http" "net/url" "path/filepath" @@ -138,7 +139,7 @@ func (m *topicMessages) info(format string, args ...any) { m.add(InfoType, format, args...) } -// use signals that we going to use this topic. +// use signals that we're going to use this topic. func (m *topicMessages) use() { if *m == nil { *m = []Message{} @@ -164,7 +165,7 @@ func (m *topicMessages) hasErrors() bool { return false } -// newProcessor returns an initilaized processor. +// newProcessor returns an initialized processor. func newProcessor(cfg *config) (*processor, error) { var validator csaf.RemoteValidator @@ -594,10 +595,15 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var file csaf.AdvisoryFile - if sha256 != "" || sha512 != "" || sign != "" { - file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign} - } else { - file = csaf.PlainAdvisoryFile(url) + switch { + case sha256 == "" && sha512 == "": + slog.Error("No hash listed on ROLIE feed", "file", url) + return + case sign == "": + slog.Error("No signature listed on ROLIE feed", "file", url) + return + default: + file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} } files = append(files, file) @@ -888,7 +894,16 @@ func (p *processor) checkIndex(base string, mask whereType) error { p.badIntegrities.error("index.txt contains invalid URL %q in line %d", u, line) continue } - files = append(files, csaf.PlainAdvisoryFile(u)) + + SHA256 := p.checkURL(u + ".sha256") + SHA512 := p.checkURL(u + ".sha512") + sign := p.checkURL(u + ".asc") + files = append(files, csaf.PlainAdvisoryFile{ + Path: u, + SHA256: SHA256, + SHA512: SHA512, + Sign: sign, + }) } return files, scanner.Err() }() @@ -906,6 +921,15 @@ func (p *processor) checkIndex(base string, mask whereType) error { return p.integrity(files, base, mask, p.badIndices.add) } +// checkURL returns the URL if 
it is accessible. +func (p *processor) checkURL(url string) string { + _, err := p.client.Head(url) + if err != nil { + return url + } + return "" +} + // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. // It extracts the file content, tests the column number and the validity of the time format // of the fields' values and if they are sorted properly. Then it passes the files to the @@ -970,9 +994,14 @@ func (p *processor) checkChanges(base string, mask whereType) error { continue } path := r[pathColumn] + + SHA256 := p.checkURL(path + ".sha256") + SHA512 := p.checkURL(path + ".sha512") + sign := p.checkURL(path + ".asc") + times, files = append(times, t), - append(files, csaf.PlainAdvisoryFile(path)) + append(files, csaf.PlainAdvisoryFile{Path: path, SHA256: SHA256, SHA512: SHA512, Sign: sign}) } return times, files, nil }() diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..025ed65 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -501,23 +501,31 @@ nextAdvisory: signData []byte ) - // Only hash when we have a remote counter part we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) + if file.SHA256URL() == "" { + slog.Info("SHA256 not present", "file", file.URL()) } else { - s256 = sha256.New() - writers = append(writers, s256) + // Only hash when we have a remote counterpart we can compare it with. 
+ if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + slog.Warn("Cannot fetch SHA256", + "url", file.SHA256URL(), + "error", err) + } else { + s256 = sha256.New() + writers = append(writers, s256) + } } - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) + if file.SHA512URL() == "" { + slog.Info("SHA512 not present", "file", file.URL()) } else { - s512 = sha512.New() - writers = append(writers, s512) + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + slog.Warn("Cannot fetch SHA512", + "url", file.SHA512URL(), + "error", err) + } else { + s512 = sha512.New() + writers = append(writers, s512) + } } // Remember the data as we need to store it to file later. diff --git a/csaf/advisories.go b/csaf/advisories.go index 6f07648..4aa7f52 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -34,55 +34,30 @@ type AdvisoryFile interface { // PlainAdvisoryFile is a simple implementation of checkFile. // The hash and signature files are directly constructed by extending // the file name. -type PlainAdvisoryFile string +type PlainAdvisoryFile struct { + Path string + SHA256 string + SHA512 string + Sign string +} // URL returns the URL of this advisory. -func (paf PlainAdvisoryFile) URL() string { return string(paf) } +func (paf PlainAdvisoryFile) URL() string { return paf.Path } // SHA256URL returns the URL of SHA256 hash file of this advisory. -func (paf PlainAdvisoryFile) SHA256URL() string { return string(paf) + ".sha256" } +func (paf PlainAdvisoryFile) SHA256URL() string { return paf.SHA256 } // SHA512URL returns the URL of SHA512 hash file of this advisory. -func (paf PlainAdvisoryFile) SHA512URL() string { return string(paf) + ".sha512" } +func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 } // SignURL returns the URL of signature file of this advisory. 
-func (paf PlainAdvisoryFile) SignURL() string { return string(paf) + ".asc" } +func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign } // LogValue implements [slog.LogValuer] func (paf PlainAdvisoryFile) LogValue() slog.Value { return slog.GroupValue(slog.String("url", paf.URL())) } -// HashedAdvisoryFile is a more involed version of checkFile. -// Here each component can be given explicitly. -// If a component is not given it is constructed by -// extending the first component. -type HashedAdvisoryFile [4]string - -func (haf HashedAdvisoryFile) name(i int, ext string) string { - if haf[i] != "" { - return haf[i] - } - return haf[0] + ext -} - -// URL returns the URL of this advisory. -func (haf HashedAdvisoryFile) URL() string { return haf[0] } - -// SHA256URL returns the URL of SHA256 hash file of this advisory. -func (haf HashedAdvisoryFile) SHA256URL() string { return haf.name(1, ".sha256") } - -// SHA512URL returns the URL of SHA512 hash file of this advisory. -func (haf HashedAdvisoryFile) SHA512URL() string { return haf.name(2, ".sha512") } - -// SignURL returns the URL of signature file of this advisory. -func (haf HashedAdvisoryFile) SignURL() string { return haf.name(3, ".asc") } - -// LogValue implements [slog.LogValuer] -func (haf HashedAdvisoryFile) LogValue() slog.Value { - return slog.GroupValue(slog.String("url", haf.URL())) -} - // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { @@ -120,7 +95,7 @@ func empty(arr []string) bool { return true } -// Process extracts the adivisory filenames and passes them with +// Process extracts the advisory filenames and passes them with // the corresponding label to fn. func (afp *AdvisoryFileProcessor) Process( fn func(TLPLabel, []AdvisoryFile) error, @@ -201,6 +176,15 @@ func (afp *AdvisoryFileProcessor) Process( return nil } +// checkURL returns the URL if it is accessible. 
+func (afp *AdvisoryFileProcessor) checkURL(url string) string { + _, err := afp.client.Head(url) + if err != nil { + return url + } + return "" +} + // loadChanges loads baseURL/changes.csv and returns a list of files // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( @@ -257,8 +241,19 @@ func (afp *AdvisoryFileProcessor) loadChanges( lg("%q contains an invalid URL %q in line %d", changesURL, path, line) continue } + + self := base.JoinPath(path).String() + sha256 := afp.checkURL(self + ".sha256") + sha512 := afp.checkURL(self + ".sha512") + sign := afp.checkURL(self + ".asc") + files = append(files, - PlainAdvisoryFile(base.JoinPath(path).String())) + PlainAdvisoryFile{ + Path: path, + SHA256: sha256, + SHA512: sha512, + Sign: sign, + }) } return files, nil } @@ -325,7 +320,6 @@ func (afp *AdvisoryFileProcessor) processROLIE( } rfeed.Entries(func(entry *Entry) { - // Filter if we have date checking. if afp.AgeAccept != nil { if t := time.Time(entry.Updated); !t.IsZero() && !afp.AgeAccept(t) { @@ -359,10 +353,15 @@ func (afp *AdvisoryFileProcessor) processROLIE( var file AdvisoryFile - if sha256 != "" || sha512 != "" || sign != "" { - file = HashedAdvisoryFile{self, sha256, sha512, sign} - } else { - file = PlainAdvisoryFile(self) + switch { + case sha256 == "" && sha512 == "": + slog.Error("No hash listed on ROLIE feed", "file", self) + return + case sign == "": + slog.Error("No signature listed on ROLIE feed", "file", self) + return + default: + file = PlainAdvisoryFile{self, sha256, sha512, sign} } files = append(files, file) From 0ab851a87428ddce7a55a335bd0d58e8dc541e73 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 25 Jul 2024 13:26:29 +0200 Subject: [PATCH 034/117] Use a default user agent --- cmd/csaf_aggregator/config.go | 5 +++++ cmd/csaf_checker/processor.go | 8 +++----- cmd/csaf_downloader/downloader.go | 8 +++----- cmd/csaf_downloader/forwarder.go | 8 +++----- util/client.go | 5 +++++ 5 
files changed, 19 insertions(+), 15 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 9808542..b73286c 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -301,6 +301,11 @@ func (c *config) httpClient(p *provider) util.Client { Client: client, Header: c.ExtraHeader, } + default: + client = &util.HeaderClient{ + Client: client, + Header: http.Header{}, + } } if c.Verbose { diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..49e815c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -435,11 +435,9 @@ func (p *processor) fullClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(p.cfg.ExtraHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: p.cfg.ExtraHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: p.cfg.ExtraHeader, } // Add optional URL logging. diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..9cef294 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -126,11 +126,9 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(d.cfg.ExtraHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: d.cfg.ExtraHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: d.cfg.ExtraHeader, } // Add optional URL logging. diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 3b1435a..c3681eb 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -111,11 +111,9 @@ func (f *forwarder) httpClient() util.Client { client := util.Client(&hClient) // Add extra headers. 
- if len(f.cfg.ForwardHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: f.cfg.ForwardHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: f.cfg.ForwardHeader, } // Add optional URL logging. diff --git a/util/client.go b/util/client.go index 5a11c7b..441aaaa 100644 --- a/util/client.go +++ b/util/client.go @@ -61,6 +61,11 @@ func (hc *HeaderClient) Do(req *http.Request) (*http.Response, error) { req.Header.Add(key, v) } } + + // Use default user agent if none is set + if userAgent := hc.Header.Get("User-Agent"); userAgent == "" { + req.Header.Add("User-Agent", "csaf-distribution/"+SemVersion) + } return hc.Client.Do(req) } From 3a67fb52100dac0ca64719899afb431fbb8bd590 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:00:40 +0200 Subject: [PATCH 035/117] Add user-agent documentation --- docs/csaf_checker.md | 13 +++++++++---- docs/csaf_downloader.md | 13 +++++++++++-- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 58f77ca..a5bc0bf 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -30,9 +30,12 @@ Help Options: Will check all given _domains_, by trying each as a CSAF provider. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. + If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there. If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/checker.toml ~/.csaf_checker.toml @@ -41,6 +44,7 @@ csaf_checker.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. 
Supported options in config files: + ``` output = "" format = "json" @@ -58,9 +62,10 @@ validator_preset = ["mandatory"] ``` Usage example: -` ./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html` +`./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html` Each performed check has a return type of either 0,1 or 2: + ``` type 0: success type 1: warning @@ -70,16 +75,16 @@ type 2: error The checker result is a success if no checks resulted in type 2, and a failure otherwise. The option `timerange` allows to only check advisories from a given time -interval. It can only be given once. See the +interval. It can only be given once. See the [downloader documentation](csaf_downloader.md#timerange-option) for details. - You can ignore certain advisories while checking by specifying a list of regular expressions[^1] to match their URLs by using the `ignorepattern` option. E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain the sub strings **white** or **red**. In the config file this has to be noted as: + ``` ignorepattern = [".*white.*", ".*red.*"] ``` @@ -88,7 +93,7 @@ ignorepattern = [".*white.*", ".*red.*"] The `role` given in the `provider-metadata.json` is not yet considered to change the overall result, -see https://github.com/csaf-poc/csaf_distribution/issues/221 . +see . If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected. To check these advisories, authorization can be given via custom headers or certificates. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index fcf6634..2831cb4 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -1,4 +1,5 @@ ## csaf_downloader + A tool to download CSAF documents from CSAF providers. ### Usage @@ -39,6 +40,8 @@ Help Options: Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider. 
+If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. + If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading procedes from there. Increasing the number of workers opens more connections to the web servers @@ -47,6 +50,7 @@ However, since this also increases the load on the servers, their administrators have taken countermeasures to limit this. If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/downloader.toml ~/.csaf_downloader.toml @@ -56,6 +60,7 @@ csaf_downloader.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Supported options in config files: + ``` # directory # not set by default insecure = false @@ -90,6 +95,7 @@ option. E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain the sub strings **white** or **red**. In the config file this has to be noted as: + ``` ignorepattern = [".*white.*", ".*red.*"] ``` @@ -106,16 +112,18 @@ into a given intervall. There are three possible notations: and 'y' for years are recognized. In these cases only integer values are accepted without any fractions. Some examples: + - `"3h"` means downloading the advisories that have changed in the last three hours. - - `"30m"` .. changed within the last thirty minutes. + - `"30m"` .. changed within the last thirty minutes. - `"3M2m"` .. changed within the last three months and two minutes. - - `"2y"` .. changed within the last two years. + - `"2y"` .. changed within the last two years. 2. Absolute. If the given string is an RFC 3339 date timestamp the time interval between this date and now is used. E.g. `"2006-01-02"` means that all files between 2006 January 2nd and now going to being downloaded. 
Accepted patterns are: + - `"2006-01-02T15:04:05Z"` - `"2006-01-02T15:04:05+07:00"` - `"2006-01-02T15:04:05-07:00"` @@ -134,6 +142,7 @@ into a given intervall. There are three possible notations: All interval boundaries are inclusive. #### Forwarding + The downloader is able to forward downloaded advisories and their checksums, OpenPGP signatures and validation results to an HTTP endpoint. The details of the implemented API are described [here](https://github.com/mfd2007/csaf_upload_interface). From be2e4e74242774d9e8bfb97f13886d9c4fa6e241 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:42:45 +0200 Subject: [PATCH 036/117] Improve hash path handling of directory feeds --- cmd/csaf_checker/processor.go | 25 ++----------- cmd/csaf_downloader/downloader.go | 29 +++++++++------- csaf/advisories.go | 58 ++++++++++++++++++------------- 3 files changed, 52 insertions(+), 60 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index de42e18..38f3e34 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -895,15 +895,7 @@ func (p *processor) checkIndex(base string, mask whereType) error { continue } - SHA256 := p.checkURL(u + ".sha256") - SHA512 := p.checkURL(u + ".sha512") - sign := p.checkURL(u + ".asc") - files = append(files, csaf.PlainAdvisoryFile{ - Path: u, - SHA256: SHA256, - SHA512: SHA512, - Sign: sign, - }) + files = append(files, csaf.DirectoryAdvisoryFile{Path: u}) } return files, scanner.Err() }() @@ -921,15 +913,6 @@ func (p *processor) checkIndex(base string, mask whereType) error { return p.integrity(files, base, mask, p.badIndices.add) } -// checkURL returns the URL if it is accessible. -func (p *processor) checkURL(url string) string { - _, err := p.client.Head(url) - if err != nil { - return url - } - return "" -} - // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. 
// It extracts the file content, tests the column number and the validity of the time format // of the fields' values and if they are sorted properly. Then it passes the files to the @@ -995,13 +978,9 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - SHA256 := p.checkURL(path + ".sha256") - SHA512 := p.checkURL(path + ".sha512") - sign := p.checkURL(path + ".asc") - times, files = append(times, t), - append(files, csaf.PlainAdvisoryFile{Path: path, SHA256: SHA256, SHA512: SHA512, Sign: sign}) + append(files, csaf.DirectoryAdvisoryFile{Path: path}) } return times, files, nil }() diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 025ed65..3bf3647 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -501,31 +501,31 @@ nextAdvisory: signData []byte ) - if file.SHA256URL() == "" { - slog.Info("SHA256 not present", "file", file.URL()) - } else { - // Only hash when we have a remote counterpart we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + // Only hash when we have a remote counterpart we can compare it with. 
+ if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + if !file.IsDirectory() { slog.Warn("Cannot fetch SHA256", "url", file.SHA256URL(), "error", err) } else { - s256 = sha256.New() - writers = append(writers, s256) + slog.Info("SHA256 not present", "file", file.URL()) } + } else { + s256 = sha256.New() + writers = append(writers, s256) } - if file.SHA512URL() == "" { - slog.Info("SHA512 not present", "file", file.URL()) - } else { - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if !file.IsDirectory() { slog.Warn("Cannot fetch SHA512", "url", file.SHA512URL(), "error", err) } else { - s512 = sha512.New() - writers = append(writers, s512) + slog.Info("SHA512 not present", "file", file.URL()) } + } else { + s512 = sha512.New() + writers = append(writers, s512) } // Remember the data as we need to store it to file later. @@ -757,6 +757,9 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, } func loadHash(client util.Client, p string) ([]byte, []byte, error) { + if p == "" { + return nil, nil, fmt.Errorf("no hash path provided") + } resp, err := client.Get(p) if err != nil { return nil, nil, err diff --git a/csaf/advisories.go b/csaf/advisories.go index 4aa7f52..d05331c 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -29,11 +29,10 @@ type AdvisoryFile interface { SHA256URL() string SHA512URL() string SignURL() string + IsDirectory() bool } -// PlainAdvisoryFile is a simple implementation of checkFile. -// The hash and signature files are directly constructed by extending -// the file name. +// PlainAdvisoryFile contains all relevant urls of a remote file. type PlainAdvisoryFile struct { Path string SHA256 string @@ -53,11 +52,41 @@ func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 } // SignURL returns the URL of signature file of this advisory. 
func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign } +// IsDirectory returns true, if was fetched via directory feeds. +func (paf PlainAdvisoryFile) IsDirectory() bool { return false } + // LogValue implements [slog.LogValuer] func (paf PlainAdvisoryFile) LogValue() slog.Value { return slog.GroupValue(slog.String("url", paf.URL())) } +// DirectoryAdvisoryFile only contains the base file path. +// The hash and signature files are directly constructed by extending +// the file name. +type DirectoryAdvisoryFile struct { + Path string +} + +// URL returns the URL of this advisory. +func (daf DirectoryAdvisoryFile) URL() string { return daf.Path } + +// SHA256URL returns the URL of SHA256 hash file of this advisory. +func (daf DirectoryAdvisoryFile) SHA256URL() string { return daf.Path + ".sha256" } + +// SHA512URL returns the URL of SHA512 hash file of this advisory. +func (daf DirectoryAdvisoryFile) SHA512URL() string { return daf.Path + ".sha512" } + +// SignURL returns the URL of signature file of this advisory. +func (daf DirectoryAdvisoryFile) SignURL() string { return daf.Path + ".asc" } + +// IsDirectory returns true, if was fetched via directory feeds. +func (daf DirectoryAdvisoryFile) IsDirectory() bool { return true } + +// LogValue implements [slog.LogValuer] +func (daf DirectoryAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", daf.URL())) +} + // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { @@ -69,7 +98,7 @@ type AdvisoryFileProcessor struct { base *url.URL } -// NewAdvisoryFileProcessor constructs an filename extractor +// NewAdvisoryFileProcessor constructs a filename extractor // for a given metadata document. func NewAdvisoryFileProcessor( client util.Client, @@ -176,15 +205,6 @@ func (afp *AdvisoryFileProcessor) Process( return nil } -// checkURL returns the URL if it is accessible. 
-func (afp *AdvisoryFileProcessor) checkURL(url string) string { - _, err := afp.client.Head(url) - if err != nil { - return url - } - return "" -} - // loadChanges loads baseURL/changes.csv and returns a list of files // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( @@ -242,18 +262,8 @@ func (afp *AdvisoryFileProcessor) loadChanges( continue } - self := base.JoinPath(path).String() - sha256 := afp.checkURL(self + ".sha256") - sha512 := afp.checkURL(self + ".sha512") - sign := afp.checkURL(self + ".asc") - files = append(files, - PlainAdvisoryFile{ - Path: path, - SHA256: sha256, - SHA512: sha512, - Sign: sign, - }) + DirectoryAdvisoryFile{Path: base.JoinPath(path).String()}) } return files, nil } From 1a2ce684ff94a0f47a4b9737698b1961b4aae91b Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 1 Aug 2024 14:53:23 +0200 Subject: [PATCH 037/117] improve default header * use `csaf_distribution` with an underscore as underscores are allowed by RFC9110 and it is more consistent as it is used with underscore at other places. * change example to `VERSION` to indicate that this is dynamic. --- docs/csaf_checker.md | 2 +- docs/csaf_downloader.md | 2 +- util/client.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index a5bc0bf..0b223b6 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -30,7 +30,7 @@ Help Options: Will check all given _domains_, by trying each as a CSAF provider. -If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent. If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there. 
diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 2831cb4..003ae4a 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -40,7 +40,7 @@ Help Options: Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider. -If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent. If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading procedes from there. diff --git a/util/client.go b/util/client.go index 441aaaa..b4478ca 100644 --- a/util/client.go +++ b/util/client.go @@ -64,7 +64,7 @@ func (hc *HeaderClient) Do(req *http.Request) (*http.Response, error) { // Use default user agent if none is set if userAgent := hc.Header.Get("User-Agent"); userAgent == "" { - req.Header.Add("User-Agent", "csaf-distribution/"+SemVersion) + req.Header.Add("User-Agent", "csaf_distribution/"+SemVersion) } return hc.Client.Do(req) } From 13a635c7e34c56e4ff39cbfc1ef1e2b6e7bd230a Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 1 Aug 2024 15:43:35 +0200 Subject: [PATCH 038/117] Add user-agent documentation to aggregator --- docs/csaf_aggregator.md | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 36cbe7e..661871c 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -16,6 +16,7 @@ Help Options: ``` If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/aggregator.toml ~/.csaf_aggregator.toml @@ -25,6 +26,7 @@ csaf_aggregator.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. 
Usage example for a single run, to test if the config is good: + ```bash ./csaf_aggregator -c docs/examples/aggregator.toml ``` @@ -62,7 +64,6 @@ SHELL=/bin/bash 30 0-23 * * * $HOME/bin/csaf_aggregator --config /etc/csaf_aggregator.toml --interim >> /var/log/csaf_aggregator/interim.log 2>&1 ``` - #### serve via web server Serve the paths where the aggregator writes its `html/` output @@ -78,7 +79,6 @@ a template. For the aggregator the difference is that you can leave out the cgi-bin part, potentially commend out the TLS client parts and adjust the `root` path accordingly. - ### config options The config file is written in [TOML](https://toml.io/en/v1.0.0). @@ -118,10 +118,12 @@ Next we have two TOML _tables_: aggregator // basic infos for the aggregator object remote_validator // config for optional remote validation checker ``` + [See the provider config](csaf_provider.md#provider-options) about how to configure `remote_validator`. At last there is the TOML _array of tables_: + ``` providers // each entry to be mirrored or listed ``` @@ -148,6 +150,9 @@ header Where valid `name` and `domain` settings are required. +If no user agent is specified with `header = "user-agent:custom-agent/1.0"` +then the default agent in the form of `csaf_distribution/VERSION` is sent. + If you want an entry to be listed instead of mirrored in a `aggregator.category == "aggregator"` instance, set `category` to `lister` in the entry. @@ -165,15 +170,16 @@ To offer an easy way of assorting CSAF documents by criteria like document category, languages or values of the branch category within the product tree, ROLIE category values can be configured in `categories`. This can either -be done using an array of strings taken literally or, by prepending `"expr:"`. -The latter is evaluated as JSONPath and the result will be added into the +be done using an array of strings taken literally or, by prepending `"expr:"`. 
+The latter is evaluated as JSONPath and the result will be added into the categories document. For a more detailed explanation and examples, [refer to the provider config](csaf_provider.md#provider-options). - #### Example config file + + ```toml workers = 2 folder = "/var/csaf_aggregator" @@ -233,8 +239,8 @@ insecure = true category = "lister" # ignore_pattern = [".*white.*", ".*red.*"] ``` - + #### Publish others' advisories From 8feddc70e1c945e2cf2ec8cab92525aa8e89106d Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 23 Jul 2024 13:41:03 +0200 Subject: [PATCH 039/117] feat: no longer require to be root user to call setup scripts --- docs/scripts/Readme.md | 4 ++-- docs/scripts/setupValidationService.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index a3b932d..95f39b2 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -12,10 +12,10 @@ and configures nginx for serving TLS connections. As creating the folders needs to authenticate with the csaf_provider, the configurations of TLS server and Client certificate authentication should be set. 
So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` -Calling example (as root): +Calling example (as user with sudo privileges): ``` bash curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh - bash prepareUbuntuInstanceForITests.sh + sudo bash prepareUbuntuInstanceForITests.sh git clone https://github.com/csaf-poc/csaf_distribution.git # --branch pushd csaf_distribution/docs/scripts/ diff --git a/docs/scripts/setupValidationService.sh b/docs/scripts/setupValidationService.sh index d6f8ba7..4a7dfd7 100755 --- a/docs/scripts/setupValidationService.sh +++ b/docs/scripts/setupValidationService.sh @@ -21,7 +21,7 @@ echo ' remote_validator= { "url" = "http://localhost:8082", "presets" = ["mandatory"], "cache" = "/var/lib/csaf/validations.db" } ' | sudo tee --append /etc/csaf/config.toml -npm install pm2 -g +sudo npm install pm2 -g pushd ~ git clone https://github.com/secvisogram/csaf-validator-service.git From 9037574d967da7ad80972edde4b74810c735e11c Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 8 Aug 2024 12:17:58 +0200 Subject: [PATCH 040/117] Improve PGP fingerprint handling Warn if no fingerprint is specified and give more details, if fingerprint comparison fails. Closes #555 --- cmd/csaf_checker/processor.go | 9 +++++++-- cmd/csaf_downloader/downloader.go | 7 ++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..b5f949e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1449,7 +1449,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { } // checkPGPKeys checks if the OpenPGP keys are available and valid, fetches -// the the remotely keys and compares the fingerprints. 
+// the remotely keys and compares the fingerprints. // As a result of these a respective error messages are passed to badPGP method // in case of errors. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { @@ -1518,8 +1518,13 @@ func (p *processor) checkPGPKeys(_ string) error { continue } + if key.Fingerprint == "" { + p.badPGPs.warn("No fingerprint for public OpenPGP key found.") + continue + } + if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { - p.badPGPs.error("Fingerprint of public OpenPGP key %s does not match remotely loaded.", u) + p.badPGPs.error("Given Fingerprint (%q) of public OpenPGP key %q does not match remotely loaded (%q).", string(key.Fingerprint), u, ckey.GetFingerprint()) continue } if p.keys == nil { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..a5eeb71 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -366,10 +366,15 @@ func (d *downloader) loadOpenPGPKeys( continue } + if key.Fingerprint == "" { + slog.Warn("No fingerprint for public OpenPGP key found.") + continue + } + if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { slog.Warn( "Fingerprint of public OpenPGP key does not match remotely loaded", - "url", u) + "url", u, "fingerprint", key.Fingerprint, "remote-fingerprint", ckey.GetFingerprint()) continue } if d.keys == nil { From c2e24f7bbb1b49f5bcdd6163aad4b03e05398f31 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Sep 2024 18:18:37 +0200 Subject: [PATCH 041/117] Remove check for empty fingerprint The schema validation already catches this error and this check will never run. 
--- cmd/csaf_checker/processor.go | 5 ----- cmd/csaf_downloader/downloader.go | 5 ----- 2 files changed, 10 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index b5f949e..d05a9ec 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1518,11 +1518,6 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - if key.Fingerprint == "" { - p.badPGPs.warn("No fingerprint for public OpenPGP key found.") - continue - } - if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { p.badPGPs.error("Given Fingerprint (%q) of public OpenPGP key %q does not match remotely loaded (%q).", string(key.Fingerprint), u, ckey.GetFingerprint()) continue diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index a5eeb71..7e07449 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -366,11 +366,6 @@ func (d *downloader) loadOpenPGPKeys( continue } - if key.Fingerprint == "" { - slog.Warn("No fingerprint for public OpenPGP key found.") - continue - } - if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { slog.Warn( "Fingerprint of public OpenPGP key does not match remotely loaded", From 5231b3386b8126b248cc8cc9be451063caa17aab Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Sat, 7 Sep 2024 09:58:14 +0200 Subject: [PATCH 042/117] docs: improve code comment (minor) --- cmd/csaf_checker/processor.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index d05a9ec..c0034ca 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1449,9 +1449,9 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { } // checkPGPKeys checks if the OpenPGP keys are available and valid, fetches -// the remotely keys and compares the fingerprints. 
-// As a result of these a respective error messages are passed to badPGP method -// in case of errors. It returns nil if all checks are passed. +// the remote pubkeys and compares the fingerprints. +// As a result of these checks respective error messages are passed +// to badPGP methods. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { p.badPGPs.use() From 37c9eaf3467acd8e7ad08dfb3a076cf9849c67cc Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Mon, 9 Sep 2024 10:25:13 +0200 Subject: [PATCH 043/117] Add CLI flags to specify what hash is preferred --- cmd/csaf_downloader/config.go | 12 +++++++- cmd/csaf_downloader/downloader.go | 50 ++++++++++++++----------------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index dcfc090..71c5055 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -41,6 +41,13 @@ const ( validationUnsafe = validationMode("unsafe") ) +type hashAlgorithm string + +const ( + algSha256 = hashAlgorithm("SHA256") + algSha2512 = hashAlgorithm("SHA512") +) + type config struct { Directory string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"` Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"` @@ -79,6 +86,9 @@ type config struct { clientCerts []tls.Certificate ignorePattern filter.PatternMatcher + + //lint:ignore SA5008 We are using choice or than once: sha256, sha512 + PreferredHash hashAlgorithm `long:"preferred_hash" short:"h" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` } // configPaths are the potential file locations of the config file. 
@@ -220,7 +230,7 @@ func (cfg *config) prepareLogging() error { w = f } ho := slog.HandlerOptions{ - //AddSource: true, + // AddSource: true, Level: cfg.LogLevel.Level, ReplaceAttr: dropSubSeconds, } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3bf3647..3cb7332 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -53,7 +53,6 @@ type downloader struct { const failedValidationDir = "failed_validation" func newDownloader(cfg *config) (*downloader, error) { - var validator csaf.RemoteValidator if cfg.RemoteValidator != "" { @@ -103,7 +102,6 @@ func logRedirect(req *http.Request, via []*http.Request) error { } func (d *downloader) httpClient() util.Client { - hClient := http.Client{} if d.cfg.verbose() { @@ -253,7 +251,6 @@ func (d *downloader) downloadFiles( label csaf.TLPLabel, files []csaf.AdvisoryFile, ) error { - var ( advisoryCh = make(chan csaf.AdvisoryFile) errorCh = make(chan error) @@ -303,7 +300,6 @@ func (d *downloader) loadOpenPGPKeys( base *url.URL, expr *util.PathEval, ) error { - src, err := expr.Eval("$.public_openpgp_keys", doc) if err != nil { // no keys. @@ -357,7 +353,6 @@ func (d *downloader) loadOpenPGPKeys( defer res.Body.Close() return crypto.NewKeyFromArmoredReader(res.Body) }() - if err != nil { slog.Warn( "Reading public OpenPGP key failed", @@ -501,31 +496,35 @@ nextAdvisory: signData []byte ) - // Only hash when we have a remote counterpart we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) + if (d.cfg.PreferredHash != "sha512" || file.SHA512URL() == "") && file.SHA256URL() != "" { + // Only hash when we have a remote counterpart we can compare it with. 
+ if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + if !file.IsDirectory() { + slog.Warn("Cannot fetch SHA256", + "url", file.SHA256URL(), + "error", err) + } else { + slog.Info("SHA256 not present", "file", file.URL()) + } } else { - slog.Info("SHA256 not present", "file", file.URL()) + s256 = sha256.New() + writers = append(writers, s256) } - } else { - s256 = sha256.New() - writers = append(writers, s256) } - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) + if (d.cfg.PreferredHash != "sha256" || file.SHA256URL() == "") && file.SHA512URL() != "" { + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if !file.IsDirectory() { + slog.Warn("Cannot fetch SHA512", + "url", file.SHA512URL(), + "error", err) + } else { + slog.Info("SHA512 not present", "file", file.URL()) + } } else { - slog.Info("SHA512 not present", "file", file.URL()) + s512 = sha512.New() + writers = append(writers, s512) } - } else { - s512 = sha512.New() - writers = append(writers, s512) } // Remember the data as we need to store it to file later. 
@@ -757,9 +756,6 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, } func loadHash(client util.Client, p string) ([]byte, []byte, error) { - if p == "" { - return nil, nil, fmt.Errorf("no hash path provided") - } resp, err := client.Get(p) if err != nil { return nil, nil, err From c148a18dba7684b17af5306569d2b4a737332e3b Mon Sep 17 00:00:00 2001 From: 4echow <33332102+4echow@users.noreply.github.com> Date: Thu, 12 Sep 2024 09:27:12 +0200 Subject: [PATCH 044/117] docs:: fix miner typo in csaf_downloader.md --- docs/csaf_downloader.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 003ae4a..07c6e63 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -158,7 +158,7 @@ key protection mechanism based on RFC 1423, see Thus it considered experimental and most likely to be removed in a future release. Please only use this option, if you fully understand the security implications! -Note that for fully automated processes, it usually not make sense +Note that for fully automated processes, it usually does not make sense to protect the client certificate's private key with a passphrase. Because the passphrase has to be accessible to the process anyway to run unattented. 
In this situation the processing environment should be secured From f36c96e79864e9aea64ce6b1017521b2492b6492 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Mon, 9 Sep 2024 16:04:47 +0200 Subject: [PATCH 045/117] Upgrade to go v1.22 Closes #570 --- .github/workflows/itest.yml | 2 +- README.md | 2 +- docs/Development.md | 2 +- go.mod | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index eff11c2..364c330 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -9,7 +9,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v3 with: - go-version: 1.21.0 + go-version: 1.22.0 - name: Set up Node.js uses: actions/setup-node@v3 diff --git a/README.md b/README.md index bc9ae2a..14ac64f 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.21+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` diff --git a/docs/Development.md b/docs/Development.md index e7ce388..5c4df22 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.22 and 1.21). +the latest version of Go (currently 1.22 and 1.23). 
## Generated files diff --git a/go.mod b/go.mod index 5b33724..52f1f02 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/csaf-poc/csaf_distribution/v3 -go 1.21 +go 1.22 require ( github.com/BurntSushi/toml v1.3.2 From c0de0c2b6de4d0b739badfcbe6d259739af9cffa Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 27 Sep 2024 15:20:36 +0200 Subject: [PATCH 046/117] Check if hash present, before sending a request --- cmd/csaf_checker/processor.go | 44 +++++++++++------------------------ 1 file changed, 14 insertions(+), 30 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 38f3e34..ede8fd6 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -84,10 +84,8 @@ type reporter interface { report(*processor, *Domain) } -var ( - // errContinue indicates that the current check should continue. - errContinue = errors.New("continue") -) +// errContinue indicates that the current check should continue. +var errContinue = errors.New("continue") type whereType byte @@ -167,7 +165,6 @@ func (m *topicMessages) hasErrors() bool { // newProcessor returns an initialized processor. func newProcessor(cfg *config) (*processor, error) { - var validator csaf.RemoteValidator if cfg.RemoteValidator != "" { @@ -240,7 +237,6 @@ func (p *processor) reset() { // Then it calls the report method on each report from the given "reporters" parameter for each domain. // It returns a pointer to the report and nil, otherwise an error. func (p *processor) run(domains []string) (*Report, error) { - report := Report{ Date: ReportTime{Time: time.Now().UTC()}, Version: util.SemVersion, @@ -297,7 +293,6 @@ func (p *processor) run(domains []string) (*Report, error) { // fillMeta fills the report with extra informations from provider metadata. 
func (p *processor) fillMeta(domain *Domain) error { - if p.pmd == nil { return nil } @@ -323,7 +318,6 @@ func (p *processor) fillMeta(domain *Domain) error { // domainChecks compiles a list of checks which should be performed // for a given domain. func (p *processor) domainChecks(domain string) []func(*processor, string) error { - // If we have a direct domain url we dont need to // perform certain checks. direct := strings.HasPrefix(domain, "https://") @@ -393,7 +387,6 @@ func (p *processor) markChecked(s string, mask whereType) bool { } func (p *processor) checkRedirect(r *http.Request, via []*http.Request) error { - url := r.URL.String() p.checkTLS(url) if p.redirects == nil { @@ -495,7 +488,6 @@ func (p *processor) usedAuthorizedClient() bool { // rolieFeedEntries loads the references to the advisory files for a given feed. func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { - client := p.httpClient() res, err := client.Get(feed) p.badDirListings.use() @@ -546,7 +538,6 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var files []csaf.AdvisoryFile rfeed.Entries(func(entry *csaf.Entry) { - // Filter if we have date checking. 
if accept := p.cfg.Range; accept != nil { if t := time.Time(entry.Updated); !t.IsZero() && !accept.Contains(t) { @@ -759,14 +750,20 @@ func (p *processor) integrity( // Check hashes p.badIntegrities.use() - for _, x := range []struct { + type hash struct { ext string url func() string hash []byte - }{ - {"SHA256", f.SHA256URL, s256.Sum(nil)}, - {"SHA512", f.SHA512URL, s512.Sum(nil)}, - } { + } + hashes := []hash{} + if f.SHA256URL() != "" { + hashes = append(hashes, hash{"SHA256", f.SHA256URL, s256.Sum(nil)}) + } + if f.SHA512URL() != "" { + hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)}) + } + + for _, x := range hashes { hu, err := url.Parse(x.url()) if err != nil { lg(ErrorType, "Bad URL %s: %v", x.url(), err) @@ -918,7 +915,6 @@ func (p *processor) checkIndex(base string, mask whereType) error { // of the fields' values and if they are sorted properly. Then it passes the files to the // "integrity" functions. It returns error if some test fails, otherwise nil. func (p *processor) checkChanges(base string, mask whereType) error { - bu, err := url.Parse(base) if err != nil { return err @@ -978,8 +974,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = - append(times, t), + times, files = append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) } return times, files, nil @@ -1152,7 +1147,6 @@ func (p *processor) checkMissing(string) error { // checkInvalid goes over all found adivisories URLs and checks // if file name conforms to standard. func (p *processor) checkInvalid(string) error { - p.badDirListings.use() var invalids []string @@ -1174,7 +1168,6 @@ func (p *processor) checkInvalid(string) error { // checkListing goes over all found adivisories URLs and checks // if their parent directory is listable. 
func (p *processor) checkListing(string) error { - p.badDirListings.use() pgs := pages{} @@ -1209,7 +1202,6 @@ func (p *processor) checkListing(string) error { // checkWhitePermissions checks if the TLP:WHITE advisories are // available with unprotected access. func (p *processor) checkWhitePermissions(string) error { - var ids []string for id, open := range p.labelChecker.whiteAdvisories { if !open { @@ -1235,7 +1227,6 @@ func (p *processor) checkWhitePermissions(string) error { // According to the result, the respective error messages added to // badProviderMetadata. func (p *processor) checkProviderMetadata(domain string) bool { - p.badProviderMetadata.use() client := p.httpClient() @@ -1282,7 +1273,6 @@ func (p *processor) checkSecurity(domain string, legacy bool) (int, string) { // checkSecurityFolder checks the security.txt in a given folder. func (p *processor) checkSecurityFolder(folder string) string { - client := p.httpClient() path := folder + "security.txt" res, err := client.Get(path) @@ -1349,7 +1339,6 @@ func (p *processor) checkSecurityFolder(folder string) string { // and serves the "provider-metadata.json". // It returns an empty string if all checks are passed, otherwise the errormessage. func (p *processor) checkDNS(domain string) string { - client := p.httpClient() path := "https://csaf.data.security." + domain res, err := client.Get(path) @@ -1359,7 +1348,6 @@ func (p *processor) checkDNS(domain string) string { if res.StatusCode != http.StatusOK { return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", path, res.StatusCode, res.Status) - } hash := sha256.New() defer res.Body.Close() @@ -1378,7 +1366,6 @@ func (p *processor) checkDNS(domain string) string { // available under the /.well-known/csaf/ directory. 
Returns the errormessage if // an error was encountered, or an empty string otherwise func (p *processor) checkWellknown(domain string) string { - client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" @@ -1408,7 +1395,6 @@ func (p *processor) checkWellknown(domain string) string { // The function returns nil, unless errors outside the checks were found. // In that case, errors are returned. func (p *processor) checkWellknownSecurityDNS(domain string) error { - warningsW := p.checkWellknown(domain) // Security check for well known (default) and legacy location warningsS, sDMessage := p.checkSecurity(domain, false) @@ -1461,7 +1447,6 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { // As a result of these a respective error messages are passed to badPGP method // in case of errors. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { - p.badPGPs.use() src, err := p.expr.Eval("$.public_openpgp_keys", p.pmd) @@ -1520,7 +1505,6 @@ func (p *processor) checkPGPKeys(_ string) error { defer res.Body.Close() return crypto.NewKeyFromArmoredReader(res.Body) }() - if err != nil { p.badPGPs.error("Reading public OpenPGP key %s failed: %v", u, err) continue From f7dc3f5ec74ea8ccada62f64a15cd9d6f9fd8b72 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Sun, 29 Sep 2024 09:08:01 +0200 Subject: [PATCH 047/117] Use .test TLD for integration setup (#577) .local is reserved for local-area networks, and .localhost is reserved for loopback devices. Using .test allows easier usage for different test setups. * https://www.rfc-editor.org/rfc/rfc2606#section-2 defines the "test." top level domain and "localhost.". * https://www.rfc-editor.org/rfc/rfc6761.html#section-6.2 explains how different implementations can use "test.". 
--- docs/development-ca.md | 2 +- docs/scripts/setupProviderForITest.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/development-ca.md b/docs/development-ca.md index 483732c..21f4ef4 100644 --- a/docs/development-ca.md +++ b/docs/development-ca.md @@ -55,7 +55,7 @@ signing_key encryption_key non_repudiation -dns_name = "*.local" +dns_name = "*.test" dns_name = "localhost" serial = 010 diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index 1a57f1e..f9d7d18 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -17,7 +17,7 @@ sudo chgrp -R www-data /var/www sudo chmod -R g+ws /var/www export NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -export DNS_NAME=csaf.data.security.localhost +export DNS_NAME=csaf.data.security.test sudo cp /usr/share/doc/fcgiwrap/examples/nginx.conf /etc/nginx/fcgiwrap.conf From 18e2e35e7cf0d92d463eaad736074c5c9d43165b Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 29 Oct 2024 09:49:27 +0100 Subject: [PATCH 048/117] Update README.md with link update alert --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 14ac64f..1953854 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,13 @@ Software-Engineering: 2024 Intevation GmbH --> + +> [!IMPORTANT] +> Adjust your links and go module paths if you still have `csaf-poc` in them, to avoid future breakage. +> (This repository was moved here on 2024-10-28. The old one is decrecated +> and redirection will be switched off in a few months.) + + # csaf_distribution Implements a [CSAF](https://csaf.io/) @@ -16,6 +23,7 @@ and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. 
+ ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) is a tool for downloading advisories from a provider. From bdd8aa0a9415da3641cf2624ac0f57381e16b9b2 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 29 Oct 2024 09:50:26 +0100 Subject: [PATCH 049/117] Update README.md --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 1953854..e6ea77f 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,6 @@ and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. - ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) is a tool for downloading advisories from a provider. From bf057e2fa8f25e155bb616ebe98523c0f76e5148 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 10:51:38 +0100 Subject: [PATCH 050/117] Update repo move alert in README.md HTML links can be adjusted right now, go module paths will have to wait a bit. --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e6ea77f..53920d8 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,9 @@ > [!IMPORTANT] -> Adjust your links and go module paths if you still have `csaf-poc` in them, to avoid future breakage. +> To avoid future breakage, if you still use `csaf-poc`: +> 1. Adjust your HTML links. +> 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > (This repository was moved here on 2024-10-28. The old one is decrecated > and redirection will be switched off in a few months.) From 6ebe7f5f5d3845cc1c9fa26d209de12b4870150a Mon Sep 17 00:00:00 2001 From: "Bernhard E. 
Reiter" Date: Wed, 30 Oct 2024 10:53:15 +0100 Subject: [PATCH 051/117] Update repo move alert in README.md use a better phrasing --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 53920d8..f28567e 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > (This repository was moved here on 2024-10-28. The old one is decrecated -> and redirection will be switched off in a few months.) +> and redirection will be switched off a few months later.) # csaf_distribution From 7aa95c03ca1f5a19914cce0158fb3212cab80d19 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 30 Oct 2024 11:03:18 +0100 Subject: [PATCH 052/117] fix: bring aggregator schema to errata01 (#583) --- csaf/schema/aggregator_json_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/schema/aggregator_json_schema.json b/csaf/schema/aggregator_json_schema.json index 7929f1f..cdad109 100644 --- a/csaf/schema/aggregator_json_schema.json +++ b/csaf/schema/aggregator_json_schema.json @@ -175,7 +175,7 @@ "type": "object", "required": [ "metadata", - "mirror", + "mirrors", "update_interval" ], "properties": { From 1aad5331d2d8d992467e8b5694c43f53dae2d22b Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 11:15:31 +0100 Subject: [PATCH 053/117] Update README.md reformat a bit --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index f28567e..8bdfd88 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ > To avoid future breakage, if you still use `csaf-poc`: > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. +> > (This repository was moved here on 2024-10-28. The old one is decrecated > and redirection will be switched off a few months later.) 
From 1c860a1ab21692f176ecc033fc484dcebc9f5728 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Wed, 30 Oct 2024 11:22:24 +0100 Subject: [PATCH 054/117] Update README.md: Fix: typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8bdfd88..568bf03 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > -> (This repository was moved here on 2024-10-28. The old one is decrecated +> (This repository was moved here on 2024-10-28. The old one is deprecated > and redirection will be switched off a few months later.) From ffadad38c6cc9aa9b29af2489ea4487d676e0f34 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 30 Oct 2024 15:53:22 +0100 Subject: [PATCH 055/117] improve test setupscript by adding missing zip Add zip as packages to be installed in preparation as the `make dist` target uses it. --- docs/scripts/prepareUbuntuInstanceForITests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scripts/prepareUbuntuInstanceForITests.sh b/docs/scripts/prepareUbuntuInstanceForITests.sh index ea88fc4..f124044 100755 --- a/docs/scripts/prepareUbuntuInstanceForITests.sh +++ b/docs/scripts/prepareUbuntuInstanceForITests.sh @@ -6,7 +6,7 @@ set -e # by installing the required packages. 
apt update -apt install -y make bash curl gnupg sed tar git nginx fcgiwrap gnutls-bin +apt install -y make bash curl gnupg sed tar git nginx fcgiwrap gnutls-bin zip # Install Go from binary distribution latest_go="$(curl https://go.dev/VERSION\?m=text| head -1).linux-amd64.tar.gz" From e8706e5eb99d40f464587c6d3aba2e2484a3fd6a Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 30 Oct 2024 14:46:50 +0100 Subject: [PATCH 056/117] feat: perform go path repo move * Change the go module path from github.com/csaf-poc/csaf_distribution to github.com/gocsaf/csaf. * Rename archive for release tarballs. * Adjust testing scripts and documentation. --- .github/workflows/itest.yml | 2 +- .github/workflows/release.yml | 4 ++-- Makefile | 6 +++--- README.md | 10 +++++----- cmd/csaf_aggregator/client.go | 2 +- cmd/csaf_aggregator/config.go | 12 ++++++------ cmd/csaf_aggregator/full.go | 4 ++-- cmd/csaf_aggregator/indices.go | 4 ++-- cmd/csaf_aggregator/interim.go | 4 ++-- cmd/csaf_aggregator/lazytransaction.go | 2 +- cmd/csaf_aggregator/lister.go | 4 ++-- cmd/csaf_aggregator/main.go | 2 +- cmd/csaf_aggregator/mirror.go | 4 ++-- cmd/csaf_aggregator/processor.go | 4 ++-- cmd/csaf_checker/config.go | 8 ++++---- cmd/csaf_checker/links.go | 2 +- cmd/csaf_checker/main.go | 2 +- cmd/csaf_checker/processor.go | 4 ++-- cmd/csaf_checker/report.go | 4 ++-- cmd/csaf_checker/reporters.go | 2 +- cmd/csaf_checker/roliecheck.go | 4 ++-- cmd/csaf_checker/rules.go | 2 +- cmd/csaf_downloader/config.go | 8 ++++---- cmd/csaf_downloader/downloader.go | 4 ++-- cmd/csaf_downloader/forwarder.go | 4 ++-- cmd/csaf_downloader/forwarder_test.go | 4 ++-- cmd/csaf_downloader/main.go | 2 +- cmd/csaf_provider/actions.go | 4 ++-- cmd/csaf_provider/config.go | 2 +- cmd/csaf_provider/create.go | 4 ++-- cmd/csaf_provider/files.go | 2 +- cmd/csaf_provider/indices.go | 2 +- cmd/csaf_provider/main.go | 2 +- cmd/csaf_provider/rolie.go | 4 ++-- cmd/csaf_provider/transaction.go | 4 ++-- cmd/csaf_uploader/config.go | 
4 ++-- cmd/csaf_uploader/main.go | 2 +- cmd/csaf_uploader/processor.go | 6 +++--- cmd/csaf_validator/main.go | 4 ++-- csaf/advisories.go | 2 +- csaf/models.go | 2 +- csaf/providermetaloader.go | 2 +- csaf/rolie.go | 2 +- csaf/summary.go | 2 +- docs/csaf_checker.md | 2 +- docs/csaf_provider.md | 4 ++-- docs/provider-setup.md | 2 +- docs/scripts/Readme.md | 8 ++++---- docs/scripts/TLSClientConfigsForITest.sh | 2 +- docs/scripts/TLSConfigsForITest.sh | 2 +- docs/scripts/prepareUbuntuInstanceForITests.sh | 2 +- docs/scripts/testAggregator.sh | 2 +- docs/scripts/testChecker.sh | 2 +- docs/scripts/testDownloader.sh | 2 +- examples/README.md | 2 +- examples/purls_searcher/main.go | 6 +++--- go.mod | 2 +- internal/options/options.go | 2 +- 58 files changed, 102 insertions(+), 102 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 364c330..9cc4c6b 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -25,7 +25,7 @@ jobs: sudo apt install -y make nginx fcgiwrap gnutls-bin cp -r $GITHUB_WORKSPACE ~ cd ~ - cd csaf_distribution/docs/scripts/ + cd csaf/docs/scripts/ # keep in sync with docs/scripts/Readme.md export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" source ./TLSConfigsForITest.sh diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 739f45c..4bcd6ba 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -24,5 +24,5 @@ jobs: uses: softprops/action-gh-release@v1 with: files: | - dist/csaf_distribution-*.zip - dist/csaf_distribution-*.tar.gz + dist/csaf-*.zip + dist/csaf-*.tar.gz diff --git a/Makefile b/Makefile index b4b3964..083d3b6 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ # SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) # Software-Engineering: 2021 Intevation GmbH # -# Makefile to build csaf_distribution components +# Makefile to build csaf components SHELL = /bin/bash BUILD = go build @@ 
-59,7 +59,7 @@ testsemver: # Set -ldflags parameter to pass the semversion. -LDFLAGS = -ldflags "-X github.com/csaf-poc/csaf_distribution/v3/util.SemVersion=$(SEMVER)" +LDFLAGS = -ldflags "-X github.com/gocsaf/csaf/v3/util.SemVersion=$(SEMVER)" # Build binaries and place them under bin-$(GOOS)-$(GOARCH) # Using 'Target-specific Variable Values' to specify the build target system @@ -78,7 +78,7 @@ build_linux build_win build_mac_amd64 build_mac_arm64: env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/... -DISTDIR := csaf_distribution-$(SEMVER) +DISTDIR := csaf-$(SEMVER) dist: build_linux build_win build_mac_amd64 build_mac_arm64 mkdir -p dist mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 diff --git a/README.md b/README.md index 568bf03..cec9248 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ > and redirection will be switched off a few months later.) -# csaf_distribution +# csaf Implements a [CSAF](https://csaf.io/) ([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) @@ -52,10 +52,10 @@ is a CSAF Aggregator, to list or mirror providers. ## Other stuff ### [examples](./examples/README.md) -are small examples of how to use `github.com/csaf-poc/csaf_distribution` +are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress, as usage of this repository as a library to access is _not officially supported_, e.g. -see https://github.com/csaf-poc/csaf_distribution/issues/367 . +see https://github.com/gocsaf/csaf/issues/367 . ## Setup Binaries for the server side are only available and tested @@ -81,7 +81,7 @@ Download the binaries from the most recent release assets on Github. - A recent version of **Go** (1.22+) should be installed. 
[Go installation](https://go.dev/doc/install) -- Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` +- Clone the repository `git clone https://github.com/gocsaf/csaf.git ` - Build Go components Makefile supplies the following targets: - Build for GNU/Linux system: `make build_linux` @@ -110,7 +110,7 @@ For further details of the development process consult our [development page](./ ## License -- `csaf_distribution` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). +- `csaf` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). - See the specific source files for details, the license itself can be found in the directory `LICENSES/`. diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index 8200d34..916baa5 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -13,7 +13,7 @@ import ( "io" "net/http" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) var errNotFound = errors.New("not found") diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index b73286c..81db0b7 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -20,12 +20,12 @@ import ( "time" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" 
"golang.org/x/time/rate" ) diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 9ec9812..e71d7b6 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -18,8 +18,8 @@ import ( "sync" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type fullJob struct { diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 272d25b..17c8d3a 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -19,8 +19,8 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) const ( diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index 023c9c4..94147bc 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -24,8 +24,8 @@ import ( "sync" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type interimJob struct { diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index 606d892..af36ee2 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -13,7 +13,7 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type lazyTransaction struct { diff --git a/cmd/csaf_aggregator/lister.go b/cmd/csaf_aggregator/lister.go index 4d758e4..7e1fb58 100644 --- a/cmd/csaf_aggregator/lister.go +++ b/cmd/csaf_aggregator/lister.go @@ -11,8 +11,8 @@ package main import ( "fmt" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + 
"github.com/gocsaf/csaf/v3/util" ) // mirrorAllowed checks if mirroring is allowed. diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 39c1051..2056e84 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -15,7 +15,7 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" "github.com/gofrs/flock" ) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 6bf72a3..c90ef68 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -30,8 +30,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mirrorAllowed checks if mirroring is allowed. diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index 5cb3628..b22e839 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -14,8 +14,8 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" "github.com/ProtonMail/gopenpgp/v2/crypto" ) diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index ac9ce62..3ea1840 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -13,10 +13,10 @@ import ( "fmt" "net/http" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + 
"github.com/gocsaf/csaf/v3/internal/options" ) type outputFormat string diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index 0456ace..a323661 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -15,7 +15,7 @@ import ( "github.com/PuerkitoBio/goquery" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type ( diff --git a/cmd/csaf_checker/main.go b/cmd/csaf_checker/main.go index 752fdf8..4efb351 100644 --- a/cmd/csaf_checker/main.go +++ b/cmd/csaf_checker/main.go @@ -12,7 +12,7 @@ package main import ( "log" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" ) // run uses a processor to check all the given domains or direct urls diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index da4214b..5fd3fbd 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -32,8 +32,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // topicMessages stores the collected topicMessages for a specific topic. diff --git a/cmd/csaf_checker/report.go b/cmd/csaf_checker/report.go index 9b5251b..58ed25a 100644 --- a/cmd/csaf_checker/report.go +++ b/cmd/csaf_checker/report.go @@ -18,8 +18,8 @@ import ( "os" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/models" ) // MessageType is the kind of the message. 
diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 016d371..157eabe 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -13,7 +13,7 @@ import ( "sort" "strings" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type ( diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 53d1150..28bd437 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -15,8 +15,8 @@ import ( "sort" "strings" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // identifier consist of document/tracking/id and document/publisher/namespace, diff --git a/cmd/csaf_checker/rules.go b/cmd/csaf_checker/rules.go index eadbbb2..e04388d 100644 --- a/cmd/csaf_checker/rules.go +++ b/cmd/csaf_checker/rules.go @@ -12,7 +12,7 @@ import ( "fmt" "sort" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/gocsaf/csaf/v3/csaf" ) type ruleCondition int diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index dcfc090..33f8dc2 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -19,10 +19,10 @@ import ( "path/filepath" "time" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" ) const ( diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index fde4cd3..f21fcc0 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -33,8 +33,8 @@ import ( 
"github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type downloader struct { diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index c3681eb..12d9fe4 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -19,8 +19,8 @@ import ( "path/filepath" "strings" - "github.com/csaf-poc/csaf_distribution/v3/internal/misc" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/gocsaf/csaf/v3/util" ) // failedForwardDir is the name of the special sub folder diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index 907bbce..25f0f1f 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -23,8 +23,8 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" ) func TestValidationStatusUpdate(t *testing.T) { diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index cc284bb..fe6efd1 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -15,7 +15,7 @@ import ( "os" "os/signal" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" ) func run(cfg *config, domains []string) error { diff --git a/cmd/csaf_provider/actions.go b/cmd/csaf_provider/actions.go index 8f385e6..1862983 100644 --- a/cmd/csaf_provider/actions.go +++ b/cmd/csaf_provider/actions.go @@ -26,8 +26,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - 
"github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) const dateFormat = time.RFC3339 diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index 49a7204..826b7bf 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -18,7 +18,7 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/crypto/bcrypt" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/gocsaf/csaf/v3/csaf" ) const ( diff --git a/cmd/csaf_provider/create.go b/cmd/csaf_provider/create.go index 56893c6..11e0b7c 100644 --- a/cmd/csaf_provider/create.go +++ b/cmd/csaf_provider/create.go @@ -22,8 +22,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // ensureFolders initializes the paths and call functions to create diff --git a/cmd/csaf_provider/files.go b/cmd/csaf_provider/files.go index 39a97e3..3b99ff5 100644 --- a/cmd/csaf_provider/files.go +++ b/cmd/csaf_provider/files.go @@ -13,7 +13,7 @@ import ( "crypto/sha512" "os" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func writeHashedFile(fname, name string, data []byte, armored string) error { diff --git a/cmd/csaf_provider/indices.go b/cmd/csaf_provider/indices.go index 805371b..a4eb97a 100644 --- a/cmd/csaf_provider/indices.go +++ b/cmd/csaf_provider/indices.go @@ -18,7 +18,7 @@ import ( "sort" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func updateIndex(dir, fname string) error { diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 8740e81..6c858c9 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -18,7 +18,7 @@ import ( "github.com/jessevdk/go-flags" - "github.com/csaf-poc/csaf_distribution/v3/util" + 
"github.com/gocsaf/csaf/v3/util" ) type options struct { diff --git a/cmd/csaf_provider/rolie.go b/cmd/csaf_provider/rolie.go index 98448bd..d9717b1 100644 --- a/cmd/csaf_provider/rolie.go +++ b/cmd/csaf_provider/rolie.go @@ -15,8 +15,8 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mergeCategories merges the given categories into the old ones. diff --git a/cmd/csaf_provider/transaction.go b/cmd/csaf_provider/transaction.go index 1b66ae0..c4c93a8 100644 --- a/cmd/csaf_provider/transaction.go +++ b/cmd/csaf_provider/transaction.go @@ -12,8 +12,8 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) func doTransaction( diff --git a/cmd/csaf_uploader/config.go b/cmd/csaf_uploader/config.go index a83361c..ceecff7 100644 --- a/cmd/csaf_uploader/config.go +++ b/cmd/csaf_uploader/config.go @@ -18,8 +18,8 @@ import ( "golang.org/x/crypto/bcrypt" "golang.org/x/term" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/options" ) const ( diff --git a/cmd/csaf_uploader/main.go b/cmd/csaf_uploader/main.go index 20f89fd..db1cef4 100644 --- a/cmd/csaf_uploader/main.go +++ b/cmd/csaf_uploader/main.go @@ -9,7 +9,7 @@ // Implements a command line tool that uploads csaf documents to csaf_provider. 
package main -import "github.com/csaf-poc/csaf_distribution/v3/internal/options" +import "github.com/gocsaf/csaf/v3/internal/options" func main() { args, cfg, err := parseArgsConfig() diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index 4598865..b57cafb 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -26,9 +26,9 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/misc" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/gocsaf/csaf/v3/util" ) type processor struct { diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index f6aecc4..b07c2f4 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -18,8 +18,8 @@ import ( "github.com/jessevdk/go-flags" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type options struct { diff --git a/csaf/advisories.go b/csaf/advisories.go index 6f07648..c51c84c 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -19,7 +19,7 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // AdvisoryFile constructs the urls of a remote file. diff --git a/csaf/models.go b/csaf/models.go index c7e507d..c4b132d 100644 --- a/csaf/models.go +++ b/csaf/models.go @@ -17,7 +17,7 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // TLPLabel is the traffic light policy of the CSAF. 
diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index b21ddc6..b28b606 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -18,7 +18,7 @@ import ( "net/http" "strings" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // ProviderMetadataLoader helps load provider-metadata.json from diff --git a/csaf/rolie.go b/csaf/rolie.go index c2b5b08..b94cfa3 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -14,7 +14,7 @@ import ( "sort" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // ROLIEServiceWorkspaceCollectionCategoriesCategory is a category in a ROLIE service collection. diff --git a/csaf/summary.go b/csaf/summary.go index 72d2faf..b10dd65 100644 --- a/csaf/summary.go +++ b/csaf/summary.go @@ -11,7 +11,7 @@ package csaf import ( "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) const ( diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 0b223b6..5152501 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -93,7 +93,7 @@ ignorepattern = [".*white.*", ".*red.*"] The `role` given in the `provider-metadata.json` is not yet considered to change the overall result, -see . +see . If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected. To check these advisories, authorization can be given via custom headers or certificates. diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index 81a45fa..b88924d 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -141,5 +141,5 @@ contact_details = "Example Company can be reached at contact_us@example.com, or There is an experimental upload interface which works with a web browser. 
It is disabled by default, as there are known issues, notably: - * https://github.com/csaf-poc/csaf_distribution/issues/43 - * https://github.com/csaf-poc/csaf_distribution/issues/256 + * https://github.com/gocsaf/csaf/issues/43 + * https://github.com/gocsaf/csaf/issues/256 diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 3f07fd0..48c29d0 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -115,7 +115,7 @@ sudo chmod g+r,o-rwx /etc/csaf/config.toml Here is a minimal example configuration, which you need to customize for a production setup, -see the [options of `csaf_provider`](https://github.com/csaf-poc/csaf_distribution/blob/main/docs/csaf_provider.md). +see the [options of `csaf_provider`](https://github.com/gocsaf/csaf/blob/main/docs/csaf_provider.md). diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index 95f39b2..77e8dae 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -1,7 +1,7 @@ Scripts for assisting the Integration tests. They were written on Ubuntu 20.04 TLS amd64 and also tested with 24.04 TLS. -- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf_distribution integration tests on a naked ubuntu LTS amd64. +- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf integration tests on a naked Ubuntu LTS amd64. - `TLSConfigsForITest.sh` generates a root CA and webserver cert by running `createRootCAForITest.sh` and `createWebserverCertForITest.sh` and configures nginx for serving TLS connections. 
@@ -14,11 +14,11 @@ As creating the folders needs to authenticate with the csaf_provider, the config Calling example (as user with sudo privileges): ``` bash - curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh + curl --fail -O https://raw.githubusercontent.com/gocsaf/csaf/main/docs/scripts/prepareUbuntuInstanceForITests.sh sudo bash prepareUbuntuInstanceForITests.sh - git clone https://github.com/csaf-poc/csaf_distribution.git # --branch - pushd csaf_distribution/docs/scripts/ + git clone https://github.com/gocsaf/csaf.git # --branch + pushd csaf/docs/scripts/ export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" source ./TLSConfigsForITest.sh diff --git a/docs/scripts/TLSClientConfigsForITest.sh b/docs/scripts/TLSClientConfigsForITest.sh index 1f94117..830666f 100755 --- a/docs/scripts/TLSClientConfigsForITest.sh +++ b/docs/scripts/TLSClientConfigsForITest.sh @@ -18,7 +18,7 @@ set -e NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -cd ~/csaf_distribution/docs/scripts/ +cd ~/csaf/docs/scripts/ source ./createCCForITest.sh echo ' diff --git a/docs/scripts/TLSConfigsForITest.sh b/docs/scripts/TLSConfigsForITest.sh index c1a5420..d7c06f9 100644 --- a/docs/scripts/TLSConfigsForITest.sh +++ b/docs/scripts/TLSConfigsForITest.sh @@ -17,7 +17,7 @@ set -e NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -cd ~/csaf_distribution/docs/scripts/ +cd ~/csaf/docs/scripts/ ## Create Root CA ./createRootCAForITest.sh diff --git a/docs/scripts/prepareUbuntuInstanceForITests.sh b/docs/scripts/prepareUbuntuInstanceForITests.sh index ea88fc4..75ce44b 100755 --- a/docs/scripts/prepareUbuntuInstanceForITests.sh +++ b/docs/scripts/prepareUbuntuInstanceForITests.sh @@ -2,7 +2,7 @@ set -e # This script prepares a naked Ubuntu LTS amd64 -# for the csaf_distribution integration tests +# for the csaf integration tests # by installing the required packages. 
apt update diff --git a/docs/scripts/testAggregator.sh b/docs/scripts/testAggregator.sh index 366ac07..f6322f6 100755 --- a/docs/scripts/testAggregator.sh +++ b/docs/scripts/testAggregator.sh @@ -29,6 +29,6 @@ popd echo echo '=== run aggregator' -cd ~/csaf_distribution/ +cd ~/csaf/ sudo cp docs/examples/aggregator.toml /etc/csaf sudo ./bin-linux-amd64/csaf_aggregator -c /etc/csaf/aggregator.toml diff --git a/docs/scripts/testChecker.sh b/docs/scripts/testChecker.sh index cb45aad..28474d0 100755 --- a/docs/scripts/testChecker.sh +++ b/docs/scripts/testChecker.sh @@ -11,7 +11,7 @@ set -e # to exit if a command in the script fails echo '==== run checker (twice)' -cd ~/csaf_distribution +cd ~/csaf ./bin-linux-amd64/csaf_checker -f html -o ../checker-results.html --insecure \ --client_cert ~/devca1/testclient1.crt \ diff --git a/docs/scripts/testDownloader.sh b/docs/scripts/testDownloader.sh index c4b9bce..6326536 100755 --- a/docs/scripts/testDownloader.sh +++ b/docs/scripts/testDownloader.sh @@ -10,7 +10,7 @@ set -e # to exit if a command in the script fails -cd ~/csaf_distribution +cd ~/csaf echo echo '==== run downloader (1)' diff --git a/examples/README.md b/examples/README.md index a70ea09..c525e96 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,6 +1,6 @@ # API examples -An experimental example of how to use `github.com/csaf-poc/csaf_distribution` +An experimental example of how to use `github.com/gocsaf/csaf` as a library. As usage of the repository as an API is currently a _work in progress_, these examples are likely to be changed. diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index c1ec3e1..72fb976 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -1,5 +1,5 @@ // Package main implements a simple demo program to -// work with the csaf_distribution library. +// work with the csaf library. 
package main import ( @@ -9,8 +9,8 @@ import ( "os" "strings" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) func main() { diff --git a/go.mod b/go.mod index 52f1f02..c8101f0 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/csaf-poc/csaf_distribution/v3 +module github.com/gocsaf/csaf/v3 go 1.22 diff --git a/internal/options/options.go b/internal/options/options.go index c0ad2bc..3a4867f 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -15,7 +15,7 @@ import ( "log/slog" "os" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" "github.com/BurntSushi/toml" "github.com/jessevdk/go-flags" From ace8aeaf985517cca2d3ba4b4a17db4e0f048021 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 15 Nov 2024 11:46:26 +0100 Subject: [PATCH 057/117] fix: build-in version for release tags * Change Makefile to remove the leading `v` from the git tag in the case of release tags. Previously this was only done for pre-release git tags. --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 083d3b6..04ec866 100644 --- a/Makefile +++ b/Makefile @@ -47,13 +47,13 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. 
-GITDESC := $(shell git describe --tags --always) -GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') +GITDESC := $(shell git describe --tags --always | sed -E 's/^v//') +GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) # Hint: The regexp in the next line only matches if there is a hyphen (`-`) # followed by a number, by which we assume that git describe # has added a string after the tag -SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/v?([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From 1e3504c7539fd6dac3e7ffdb2c35cb1111153299 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 15 Nov 2024 12:12:24 +0100 Subject: [PATCH 058/117] improve Makefile improvement --- Makefile | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 04ec866..163ace5 100644 --- a/Makefile +++ b/Makefile @@ -41,19 +41,19 @@ tag_checked_out: # into a semver version. For this we increase the PATCH number, so that # any commit after a tag is considered newer than the semver from the tag # without an optional 'v' -# Note we need `--tags` because github release only creates lightweight tags +# Note we need `--tags` because github releases only create lightweight tags # (see feature request https://github.com/github/feedback/discussions/4924). # We use `--always` in case of being run as github action with shallow clone. # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. 
-GITDESC := $(shell git describe --tags --always | sed -E 's/^v//') -GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') +GITDESC := $(shell git describe --tags --always) +GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) -# Hint: The regexp in the next line only matches if there is a hyphen (`-`) -# followed by a number, by which we assume that git describe -# has added a string after the tag -SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +# Hint: The second regexp in the next line only matches +# if there is a hyphen (`-`) followed by a number, +# by which we assume that git describe has added a string after the tag +SEMVER := $(shell echo '$(GITDESC)' | sed -E -e 's/^v//' -e 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From c00dc36547e433f52d6dbcbf5345d6cc534c2d8a Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:31:58 +0100 Subject: [PATCH 059/117] Remove `-h` for preferred hash configuration This option was in conflict with the help display. 
--- cmd/csaf_downloader/config.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 71c5055..619cce1 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -88,7 +88,7 @@ type config struct { ignorePattern filter.PatternMatcher //lint:ignore SA5008 We are using choice or than once: sha256, sha512 - PreferredHash hashAlgorithm `long:"preferred_hash" short:"h" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` + PreferredHash hashAlgorithm `long:"preferred_hash" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` } // configPaths are the potential file locations of the config file. From de047b76829f898ba9e22be99ca384dc0ddc7563 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 21 Nov 2024 12:53:07 +0100 Subject: [PATCH 060/117] Feat: Add prefered hash to downloader docs --- docs/csaf_downloader.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index fcf6634..6335366 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -32,6 +32,7 @@ Application Options: --logfile=FILE FILE to log downloading to (default: downloader.log) --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file + --preferred_hash=HASH[sha256|sha512] HASH to prefer Help Options: -h, --help Show this help message From 01645f55598e01e891c1a146eda6b9817b2e9c9c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 21 Nov 2024 12:49:02 +0100 Subject: [PATCH 061/117] Fix: Update downloader docs --- docs/csaf_downloader.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 07c6e63..04f93b2 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -22,6 +22,7 @@ 
Application Options: -f, --folder=FOLDER Download into a given subFOLDER -i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs -H, --header= One or more extra HTTP header fields + --enumerate_pmd_only If this flag is set to true, the downloader will only enumerate valid provider metadata files, but not download documents --validator=URL URL to validate documents remotely --validator_cache=FILE FILE to cache remote validations --validator_preset=PRESETS One or more PRESETS to validate remotely (default: [mandatory]) @@ -30,8 +31,8 @@ Application Options: --forward_header= One or more extra HTTP header fields used by forwarding --forward_queue=LENGTH Maximal queue LENGTH before forwarder (default: 5) --forward_insecure Do not check TLS certificates from forward endpoint - --logfile=FILE FILE to log downloading to (default: downloader.log) - --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) + --log_file=FILE FILE to log downloading to (default: downloader.log) + --log_level=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file Help Options: From fe4f01d06255e67db2c5ee3f6f3e9a1453b2dea0 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:52:56 +0100 Subject: [PATCH 062/117] fix: Link to file was not working (#592) --- docs/csaf_provider.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index b88924d..cb27f9f 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -4,7 +4,7 @@ The [setup docs](../README.md#setup-trusted-provider) explain how to wire this up with nginx and where the config file lives. 
When installed, two endpoints are offered, -and you should use the [csaf_uploader](../docs/csaf_uploader) +and you should use the [csaf_uploader](../docs/csaf_uploader.md) to access them: ### /api/create From f6d7589fde4b7208572d6a0781dd0624ecbbe582 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Fri, 22 Nov 2024 15:58:41 +0100 Subject: [PATCH 063/117] Add required upload permissions --- .github/workflows/release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4bcd6ba..d1e370f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,6 +8,8 @@ jobs: releases-matrix: name: Release Go binaries runs-on: ubuntu-20.04 + permissions: + contents: write steps: - name: Checkout uses: actions/checkout@v3 From 9495d8b1c38ac814f10fd29762e509ed849203db Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 22 Nov 2024 16:10:54 +0100 Subject: [PATCH 064/117] Update Go 3rd party libs --- go.mod | 30 +++++++++++----------- go.sum | 80 +++++++++++++++++++++++----------------------------------- 2 files changed, 47 insertions(+), 63 deletions(-) diff --git a/go.mod b/go.mod index c8101f0..1ef2216 100644 --- a/go.mod +++ b/go.mod @@ -1,31 +1,31 @@ module github.com/gocsaf/csaf/v3 -go 1.22 +go 1.22.9 require ( - github.com/BurntSushi/toml v1.3.2 + github.com/BurntSushi/toml v1.4.0 github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 - github.com/ProtonMail/gopenpgp/v2 v2.7.4 + github.com/ProtonMail/gopenpgp/v2 v2.8.0 github.com/PuerkitoBio/goquery v1.8.1 - github.com/gofrs/flock v0.8.1 - github.com/jessevdk/go-flags v1.5.0 + github.com/gofrs/flock v0.12.1 + github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 - go.etcd.io/bbolt v1.3.8 - golang.org/x/crypto v0.14.0 - golang.org/x/term v0.13.0 - golang.org/x/time v0.3.0 + go.etcd.io/bbolt v1.3.11 + 
golang.org/x/crypto v0.29.0 + golang.org/x/term v0.26.0 + golang.org/x/time v0.8.0 ) require ( - github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect + github.com/ProtonMail/go-crypto v1.1.2 // indirect github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect github.com/andybalholm/cascadia v1.3.2 // indirect - github.com/cloudflare/circl v1.3.6 // indirect + github.com/cloudflare/circl v1.5.0 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/shopspring/decimal v1.3.1 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.14.0 // indirect - golang.org/x/text v0.13.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect ) diff --git a/go.sum b/go.sum index f81653d..47637e9 100644 --- a/go.sum +++ b/go.sum @@ -1,34 +1,30 @@ -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= +github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/Intevation/gval v1.3.0 h1:+Ze5sft5MmGbZrHj06NVUbcxCb67l9RaPTLMNr37mjw= github.com/Intevation/gval v1.3.0/go.mod h1:xmGyGpP5be12EL0P12h+dqiYG8qn2j3PJxIgkoOHO5o= github.com/Intevation/jsonpath v0.2.1 h1:rINNQJ0Pts5XTFEG+zamtdL7l9uuE1z0FBA+r55Sw+A= github.com/Intevation/jsonpath v0.2.1/go.mod h1:WnZ8weMmwAx/fAO3SutjYFU+v7DFreNYnibV7CiaYIw= -github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= -github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= -github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= 
+github.com/ProtonMail/go-crypto v1.1.2 h1:A7JbD57ThNqh7XjmHE+PXpQ3Dqt3BrSAC0AL0Go3KS0= +github.com/ProtonMail/go-crypto v1.1.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= -github.com/ProtonMail/gopenpgp/v2 v2.7.4 h1:Vz/8+HViFFnf2A6XX8JOvZMrA6F5puwNvvF21O1mRlo= -github.com/ProtonMail/gopenpgp/v2 v2.7.4/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= +github.com/ProtonMail/gopenpgp/v2 v2.8.0 h1:WvMv3CMcFsqKSM4/Qf8sf3tgyQkzDqQmoSE49bnBuP4= +github.com/ProtonMail/gopenpgp/v2 v2.8.0/go.mod h1:qb2GUSnmA9ipBW5GVtCtEhkummSlqs2A8Ar3S0HBgSY= github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= -github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= -github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= -github.com/cloudflare/circl v1.3.6 h1:/xbKIqSHbZXHwkhbrhrt2YOHIwYJlXH94E3tI/gDlUg= -github.com/cloudflare/circl v1.3.6/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= +github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= +github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= +github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -37,78 +33,66 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= 
-github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= +go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= 
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod 
h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= +golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From 7824f3b48da9c868940936b3839483d15feaf8f3 
Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 22 Nov 2024 16:31:56 +0100 Subject: [PATCH 065/117] Improve hash fetching and logging --- cmd/csaf_downloader/config.go | 4 +- cmd/csaf_downloader/downloader.go | 108 +++++++++++++++++++++++------- 2 files changed, 84 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 619cce1..a262ef7 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -44,8 +44,8 @@ const ( type hashAlgorithm string const ( - algSha256 = hashAlgorithm("SHA256") - algSha2512 = hashAlgorithm("SHA512") + algSha256 = hashAlgorithm("SHA256") + algSha512 = hashAlgorithm("SHA512") ) type config struct { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3cb7332..18fc1e8 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -25,6 +25,7 @@ import ( "os" "path" "path/filepath" + "slices" "strconv" "strings" "sync" @@ -37,6 +38,13 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/util" ) +type hashFetchInfo struct { + url string + preferred bool + warn bool + hashType hashAlgorithm +} + type downloader struct { cfg *config keys *crypto.KeyRing @@ -496,35 +504,39 @@ nextAdvisory: signData []byte ) - if (d.cfg.PreferredHash != "sha512" || file.SHA512URL() == "") && file.SHA256URL() != "" { - // Only hash when we have a remote counterpart we can compare it with. 
- if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) - } else { - slog.Info("SHA256 not present", "file", file.URL()) - } - } else { - s256 = sha256.New() - writers = append(writers, s256) + hashToFetch := []hashFetchInfo{} + if file.SHA512URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA512URL(), + warn: true, + hashType: algSha512, + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512))}) + } else { + slog.Info("SHA512 not present") + } + if file.SHA256URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA256URL(), + warn: true, + hashType: algSha256, + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256))}) + } else { + slog.Info("SHA256 not present") + } + if file.IsDirectory() { + for i := range hashToFetch { + hashToFetch[i].warn = false } } - if (d.cfg.PreferredHash != "sha256" || file.SHA256URL() == "") && file.SHA512URL() != "" { - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) - } else { - slog.Info("SHA512 not present", "file", file.URL()) - } - } else { - s512 = sha512.New() - writers = append(writers, s512) - } + remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch) + if remoteSHA512 != nil { + s512 = sha512.New() + writers = append(writers, s512) + } + if remoteSHA256 != nil { + s256 = sha256.New() + writers = append(writers, s256) } // Remember the data as we need to store it to file later. 
@@ -755,6 +767,50 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, return sign, data, nil } +func loadHashes(client util.Client, hashes []hashFetchInfo) ([]byte, []byte, []byte, []byte) { + var remoteSha256, remoteSha512, sha256Data, sha512Data []byte + + // Load preferred hashes first + slices.SortStableFunc(hashes, func(a, b hashFetchInfo) int { + if a.preferred == b.preferred { + return 0 + } + if a.preferred && !b.preferred { + return -1 + } + return 1 + }) + for _, h := range hashes { + if remote, data, err := loadHash(client, h.url); err != nil { + if h.warn { + slog.Warn("Cannot fetch hash", + "hash", h.hashType, + "url", h.url, + "error", err) + } else { + slog.Info("Hash not present", "hash", h.hashType, "file", h.url) + } + } else { + switch h.hashType { + case algSha512: + { + remoteSha512 = remote + sha512Data = data + } + case algSha256: + { + remoteSha256 = remote + sha256Data = data + } + } + if h.preferred { + break + } + } + } + return remoteSha256, sha256Data, remoteSha512, sha512Data +} + func loadHash(client util.Client, p string) ([]byte, []byte, error) { resp, err := client.Get(p) if err != nil { From b2180849e99f2b1df9dbc97a6b2d3c6d93fcc679 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Mon, 25 Nov 2024 09:38:13 +0100 Subject: [PATCH 066/117] Update README.md that go paths can be adjusted --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cec9248..463b1d9 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ > [!IMPORTANT] > To avoid future breakage, if you still use `csaf-poc`: > 1. Adjust your HTML links. -> 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. -> +> 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). +> > (This repository was moved here on 2024-10-28. 
The old one is deprecated > and redirection will be switched off a few months later.) From a167bf65ad14acb142dba288529ee760799f338d Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 25 Nov 2024 14:27:56 +0100 Subject: [PATCH 067/117] Add Apache 2.0 license to root folder This allows other programs like google/licensecheck to correctly detect the license. This is required to display the documentation in `pkg.go.dev`. --- LICENSE-Apache-2.0.txt | 73 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 LICENSE-Apache-2.0.txt diff --git a/LICENSE-Apache-2.0.txt b/LICENSE-Apache-2.0.txt new file mode 100644 index 0000000..137069b --- /dev/null +++ b/LICENSE-Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
+ +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 
+ +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. From ffb4eff933fef6c222dd131e90675152589c8003 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 27 Nov 2024 12:15:21 +0100 Subject: [PATCH 068/117] Merge unittest into sha-handling commit 990c74a1a64cf8688a7fd14ebb524ce96a320eef Merge: 86d7ce1 7824f3b Author: koplas Date: Fri Nov 22 16:58:46 2024 +0100 Merge branch 'sha-handling' into unittest commit 86d7ce13dcf1ff2250f27b5e9b811da38937fff5 Merge: a6807d2 79b8900 Author: koplas Date: Fri Nov 22 16:54:45 2024 +0100 Merge branch 'sha-handling' into unittest commit 79b89009dd7f5dd748ccedc0ea87ea26e75b65d2 Author: koplas Date: Fri Nov 22 16:31:56 2024 +0100 Improve hash fetching and logging commit a6807d24d604cafa4e5d30d6ba9c948490d9f883 Merge: ddb5518 d18d2c3 Author: koplas Date: Fri Nov 22 16:51:55 2024 +0100 Merge branch 'sha-handling' into unittest commit d18d2c3bf17950dad276457136c2262988cca129 Author: koplas Date: Fri Nov 22 16:31:56 2024 +0100 Improve hash fetching and logging commit ddb5518c6d57adce14fb5f7665d219778e642c53 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Tue Sep 17 10:45:25 2024 +0200 Extend SHA marking tests commit 13c94f4fa06c0ba3ca52f76b93934f6855d80e81 Author: koplas Date: Mon Sep 16 20:46:31 2024 +0200 Use temp directory for downloads commit 1819b4896babaf9bd1136e5846e07224fb201b18 Author: koplas Date: Mon Sep 16 20:37:55 2024 +0200 Fix rolie feed commit 989e3667bad4c10cb1a779d3a7efd526929dc002 Author: 
koplas Date: Mon Sep 16 20:23:22 2024 +0200 Fix provider-metadata.json commit 714735d74a159e1fd8f7e756673742708dc758d4 Author: koplas Date: Mon Sep 16 20:08:21 2024 +0200 Implement provider handler commit d488e3994749c3e7daf2c00f2a7952974a8dce49 Author: koplas Date: Mon Sep 16 16:26:37 2024 +0200 Add info about gpg key commit a9bf9da130a04fffbf00481930575d1b292d138f Author: koplas Date: Mon Sep 16 16:12:49 2024 +0200 Rename directory testdata commit 6ca6dfee25c947758fac0abfb28e10049809d3ec Author: koplas Date: Mon Sep 16 16:01:41 2024 +0200 Add initial downloader tests commit 20bee797c61a457c58b37c208f0540a5ed7d7468 Author: koplas Date: Mon Sep 16 15:58:31 2024 +0200 Fix: Remove unecessary error print commit 8e4e508073e6a8d34922295de35da42b4ea8a93a Author: koplas Date: Mon Sep 16 14:50:48 2024 +0200 Extend links test commit 3ba29f94de3eebc379adc021f40fd5cd0587b57d Author: koplas Date: Mon Sep 16 14:11:14 2024 +0200 Add initial directory feed testdata commit dee55aafd9052adcda28a231b04271d866d06dd7 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Mon Sep 16 10:47:32 2024 +0200 Add initial testdata commit cd9338ae7279791db62e28e8f4b5cfe9cf370881 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Thu Sep 12 15:54:42 2024 +0200 Add initial download unittests --- cmd/csaf_aggregator/client_test.go | 67 ++++++ cmd/csaf_checker/links_test.go | 80 ++++++- cmd/csaf_downloader/config.go | 4 +- cmd/csaf_downloader/downloader.go | 6 + cmd/csaf_downloader/downloader_test.go | 218 ++++++++++++++++++ csaf/providermetaloader.go | 2 +- .../openpgp/info.txt | 2 + .../openpgp/privkey.asc | 15 ++ .../openpgp/pubkey.asc | 13 ++ .../provider-metadata.json | 25 ++ .../simple-directory-provider/security.txt | 2 + .../avendor-advisory-0004-not-listed.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json.asc | 7 + .../white/avendor-advisory-0004.json.sha256 | 1 + 
.../white/avendor-advisory-0004.json.sha512 | 1 + .../white/changes.csv | 1 + .../white/index.html | 6 + .../simple-directory-provider/white/index.txt | 1 + .../simple-rolie-provider/openpgp/info.txt | 2 + .../simple-rolie-provider/openpgp/privkey.asc | 15 ++ .../simple-rolie-provider/openpgp/pubkey.asc | 13 ++ .../provider-metadata.json | 33 +++ testdata/simple-rolie-provider/security.txt | 2 + testdata/simple-rolie-provider/service.json | 23 ++ .../white/avendor-advisory-0004.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json.asc | 7 + .../white/avendor-advisory-0004.json.sha256 | 1 + .../white/avendor-advisory-0004.json.sha512 | 1 + .../white/white-feed.json | 61 +++++ 30 files changed, 1115 insertions(+), 4 deletions(-) create mode 100644 cmd/csaf_aggregator/client_test.go create mode 100644 cmd/csaf_downloader/downloader_test.go create mode 100644 testdata/simple-directory-provider/openpgp/info.txt create mode 100644 testdata/simple-directory-provider/openpgp/privkey.asc create mode 100644 testdata/simple-directory-provider/openpgp/pubkey.asc create mode 100644 testdata/simple-directory-provider/provider-metadata.json create mode 100644 testdata/simple-directory-provider/security.txt create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 create mode 100644 testdata/simple-directory-provider/white/changes.csv create mode 100644 testdata/simple-directory-provider/white/index.html create mode 100644 testdata/simple-directory-provider/white/index.txt create mode 100644 testdata/simple-rolie-provider/openpgp/info.txt create mode 100644 
testdata/simple-rolie-provider/openpgp/privkey.asc create mode 100644 testdata/simple-rolie-provider/openpgp/pubkey.asc create mode 100644 testdata/simple-rolie-provider/provider-metadata.json create mode 100644 testdata/simple-rolie-provider/security.txt create mode 100644 testdata/simple-rolie-provider/service.json create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 create mode 100644 testdata/simple-rolie-provider/white/white-feed.json diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go new file mode 100644 index 0000000..c08b29a --- /dev/null +++ b/cmd/csaf_aggregator/client_test.go @@ -0,0 +1,67 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. 
+// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package main + +import ( + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/csaf-poc/csaf_distribution/v3/util" +) + +func Test_downloadJSON(t *testing.T) { + tests := []struct { + name string + statusCode int + contentType string + wantErr error + }{ + { + name: "status ok, application/json", + statusCode: http.StatusOK, + contentType: "application/json", + wantErr: nil, + }, + { + name: "status found, application/json", + statusCode: http.StatusFound, + contentType: "application/json", + wantErr: errNotFound, + }, + { + name: "status ok, application/xml", + statusCode: http.StatusOK, + contentType: "application/xml", + wantErr: errNotFound, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + found := func(r io.Reader) error { + return nil + } + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Add("Content-Type", test.contentType) + w.WriteHeader(test.statusCode) + })) + defer server.Close() + hClient := http.Client{} + client := util.Client(&hClient) + if gotErr := downloadJSON(client, server.URL, found); gotErr != test.wantErr { + t.Errorf("downloadJSON: Expected %q but got %q.", test.wantErr, gotErr) + } + }) + } +} diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index 8abf4e6..aa04222 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -10,8 +10,12 @@ package main import ( "fmt" + "net/http" + "net/http/httptest" "strings" "testing" + + "github.com/csaf-poc/csaf_distribution/v3/util" ) const page0 = ` @@ -31,7 +35,6 @@ const page0 = ` ` func TestLinksOnPage(t *testing.T) { - var links []string err := linksOnPage( @@ -58,3 +61,78 @@ func TestLinksOnPage(t 
*testing.T) { } } } + +func Test_listed(t *testing.T) { + tests := []struct { + name string + badDirs util.Set[string] + path string + want bool + }{ + { + name: "listed path", + badDirs: util.Set[string]{}, + path: "/white/avendor-advisory-0004.json", + want: true, + }, + { + name: "badDirs contains path", + badDirs: util.Set[string]{"/white/": {}}, + path: "/white/avendor-advisory-0004.json", + want: false, + }, + { + name: "not found", + badDirs: util.Set[string]{}, + path: "/not-found/resource.json", + want: false, + }, + { + name: "badDirs does not contain path", + badDirs: util.Set[string]{"/bad-dir/": {}}, + path: "/white/avendor-advisory-0004.json", + want: true, + }, + { + name: "unlisted path", + badDirs: util.Set[string]{}, + path: "/white/avendor-advisory-0004-not-listed.json", + want: false, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + fs := http.FileServer(http.Dir("../../testdata/simple-directory-provider")) + server := httptest.NewTLSServer(fs) + defer server.Close() + + serverURL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + pgs := pages{} + cfg := config{RemoteValidator: "", RemoteValidatorCache: ""} + p, err := newProcessor(&cfg) + if err != nil { + t.Error(err) + } + p.client = client + + badDirs := util.Set[string]{} + for dir := range test.badDirs { + badDirs.Add(serverURL + dir) + } + + got, _ := pgs.listed(serverURL+test.path, p, badDirs) + if got != test.want { + t.Errorf("%q: Expected %t but got %t.", test.name, test.want, got) + } + }) + } +} diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index a262ef7..a44fa81 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -44,8 +44,8 @@ const ( type hashAlgorithm string const ( - algSha256 = hashAlgorithm("SHA256") - algSha512 = hashAlgorithm("SHA512") + algSha256 = hashAlgorithm("sha256") + 
algSha512 = hashAlgorithm("sha512") ) type config struct { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 18fc1e8..ca5cccc 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -47,6 +47,7 @@ type hashFetchInfo struct { type downloader struct { cfg *config + client *util.Client // Used for testing keys *crypto.KeyRing validator csaf.RemoteValidator forwarder *forwarder @@ -131,6 +132,11 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) + // Overwrite for testing purposes + if client != nil { + client = *d.client + } + // Add extra headers. if len(d.cfg.ExtraHeader) > 0 { client = &util.HeaderClient{ diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go new file mode 100644 index 0000000..cf02035 --- /dev/null +++ b/cmd/csaf_downloader/downloader_test.go @@ -0,0 +1,218 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package main + +import ( + "context" + "errors" + "html/template" + "log/slog" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + + "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/csaf-poc/csaf_distribution/v3/util" +) + +type ProviderParams struct { + URL string + EnableSha256 bool + EnableSha512 bool +} + +func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := "../../testdata/" + if directoryProvider { + path += "simple-directory-provider" + } else { + path += "simple-rolie-provider" + } + + path += r.URL.Path + + if strings.HasSuffix(r.URL.Path, "/") { + path += "index.html" + } + + content, err := os.ReadFile(path) + if err != nil { + w.WriteHeader(http.StatusNotFound) + return + } + switch { + case strings.HasSuffix(path, ".html"): + w.Header().Add("Content-Type", "text/html") + case strings.HasSuffix(path, ".json"): + w.Header().Add("Content-Type", "application/json") + case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: + w.WriteHeader(http.StatusNotFound) + return + case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: + w.WriteHeader(http.StatusNotFound) + return + default: + w.Header().Add("Content-Type", "text/plain") + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + err = tmplt.Execute(w, params) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + }) +} + +func checkIfFileExists(path string, t *testing.T) bool { + if _, err := os.Stat(path); err == nil { + return true + } else if errors.Is(err, os.ErrNotExist) { + return false + 
} else { + t.Fatalf("Failed to check if file exists: %v", err) + return false + } +} + +func TestShaMarking(t *testing.T) { + tests := []struct { + name string + directoryProvider bool + wantSha256 bool + wantSha512 bool + enableSha256 bool + enableSha512 bool + preferredHash hashAlgorithm + }{ + { + name: "want sha256 and sha512", + directoryProvider: false, + wantSha256: true, + wantSha512: true, + enableSha256: true, + enableSha512: true, + }, + { + name: "only want sha256", + directoryProvider: false, + wantSha256: true, + wantSha512: false, + enableSha256: true, + enableSha512: true, + preferredHash: algSha256, + }, + { + name: "only want sha512", + directoryProvider: false, + wantSha256: false, + wantSha512: true, + enableSha256: true, + enableSha512: true, + preferredHash: algSha512, + }, + { + name: "only want sha512", + directoryProvider: false, + wantSha256: false, + wantSha512: true, + enableSha256: true, + enableSha512: true, + preferredHash: algSha512, + }, + + { + name: "only deliver sha256", + directoryProvider: false, + wantSha256: true, + wantSha512: false, + enableSha256: true, + enableSha512: false, + preferredHash: algSha512, + }, + { + name: "only want sha256, directory provider", + directoryProvider: true, + wantSha256: true, + wantSha512: false, + enableSha256: true, + enableSha512: true, + preferredHash: algSha256, + }, + { + name: "only want sha512, directory provider", + directoryProvider: true, + wantSha256: false, + wantSha512: true, + enableSha256: true, + enableSha512: true, + preferredHash: algSha512, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + params := ProviderParams{ + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + } + server := httptest.NewTLSServer(ProviderHandler(¶ms, test.directoryProvider)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := 
server.Client() + client := util.Client(hClient) + + tempDir := t.TempDir() + cfg := config{LogLevel: &options.LogLevel{Level: slog.LevelDebug}, Directory: tempDir, PreferredHash: test.preferredHash} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + d, err := newDownloader(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + d.client = &client + + ctx := context.Background() + err = d.run(ctx, []string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) + } + d.close() + + // Check for downloaded hashes + sha256Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha256", t) + sha512Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha512", t) + + if sha256Exists != test.wantSha256 { + t.Errorf("%v: expected sha256 hash present to be %v, got: %v", test.name, test.wantSha256, sha256Exists) + } + + if sha512Exists != test.wantSha512 { + t.Errorf("%v: expected sha512 hash present to be %v, got: %v", test.name, test.wantSha512, sha512Exists) + } + }) + } +} diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index b21ddc6..aa3c38a 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -352,7 +352,7 @@ func (pmdl *ProviderMetadataLoader) loadFromURL(path string) *LoadedProviderMeta case len(errors) > 0: result.Messages = []ProviderMetadataLoadMessage{{ Type: SchemaValidationFailed, - Message: fmt.Sprintf("%s: Validating against JSON schema failed: %v", path, err), + Message: fmt.Sprintf("%s: Validating against JSON schema failed", path), }} for _, msg := range errors { result.Messages.Add( diff --git a/testdata/simple-directory-provider/openpgp/info.txt b/testdata/simple-directory-provider/openpgp/info.txt new file mode 100644 index 0000000..3a159f6 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/info.txt @@ -0,0 +1,2 @@ 
+The GPG key was generated with no passphrase and this command: +`gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --quick-generate-key security@example.com"` diff --git a/testdata/simple-directory-provider/openpgp/privkey.asc b/testdata/simple-directory-provider/openpgp/privkey.asc new file mode 100644 index 0000000..816f309 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/privkey.asc @@ -0,0 +1,15 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- + +lFgEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr4AAQD5G5xy/yTN5b+lvV5Ahrbz1qOZ/wmKTieGOH9GZb6JwhHwtBRzZWN1 +cml0eUBleGFtcGxlLmNvbYiZBBMWCgBBFiEEqJFMovEROcammgAY+zzZsV3mFZYF +AmbbEDcCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ+zzZ +sV3mFZZskQEAg5Dttqm6TA7MtLxz7VSlklx95LQr9d5jm4jcOaqlGT0A/1mAAlUq +SDySFGI6DFQLcaZaUd9Yl+1b0Icr0tUiOaQHnF0EZtsQNxIKKwYBBAGXVQEFAQEH +QOTHP4FkopIGJMWXTYsaeQ1Dugd+yNYWB357vRYq6QsiAwEIBwAA/0RIazq1s8Oe +23jvNaZGb/adDYnRrkCMXXTBKsuA6WOAEhKIeAQYFgoAIBYhBKiRTKLxETnGppoA +GPs82bFd5hWWBQJm2xA3AhsMAAoJEPs82bFd5hWWDKABAOl+NoM6FBhKAvckUXDR +MLZ4k778N4Vy9VHbectjRKj1AQCO3JOmON+U6/mjohXrc2bwzKzt2yGiLP2HMxDx +uzMXBQ== +=4XHC +-----END PGP PRIVATE KEY BLOCK----- diff --git a/testdata/simple-directory-provider/openpgp/pubkey.asc b/testdata/simple-directory-provider/openpgp/pubkey.asc new file mode 100644 index 0000000..88cb720 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/pubkey.asc @@ -0,0 +1,13 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mDMEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr60FHNlY3VyaXR5QGV4YW1wbGUuY29tiJkEExYKAEEWIQSokUyi8RE5xqaa +ABj7PNmxXeYVlgUCZtsQNwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIe +BwIXgAAKCRD7PNmxXeYVlmyRAQCDkO22qbpMDsy0vHPtVKWSXH3ktCv13mObiNw5 +qqUZPQD/WYACVSpIPJIUYjoMVAtxplpR31iX7VvQhyvS1SI5pAe4OARm2xA3Egor +BgEEAZdVAQUBAQdA5Mc/gWSikgYkxZdNixp5DUO6B37I1hYHfnu9FirpCyIDAQgH +iHgEGBYKACAWIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZtsQNwIbDAAKCRD7PNmx 
+XeYVlgygAQDpfjaDOhQYSgL3JFFw0TC2eJO+/DeFcvVR23nLY0So9QEAjtyTpjjf +lOv5o6IV63Nm8Mys7dshoiz9hzMQ8bszFwU= +=rhGT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/testdata/simple-directory-provider/provider-metadata.json b/testdata/simple-directory-provider/provider-metadata.json new file mode 100644 index 0000000..792afd3 --- /dev/null +++ b/testdata/simple-directory-provider/provider-metadata.json @@ -0,0 +1,25 @@ +{ + "canonical_url": "{{.URL}}/provider-metadata.json", + "distributions": [ + { + "directory_url": "{{.URL}}/white/" + } + ], + "last_updated": "2020-01-01T00:00:00Z", + "list_on_CSAF_aggregators": true, + "metadata_version": "2.0", + "mirror_on_CSAF_aggregators": true, + "public_openpgp_keys": [ + { + "fingerprint": "A8914CA2F11139C6A69A0018FB3CD9B15DE61596", + "url": "{{.URL}}/openpgp/pubkey.asc" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc", + "namespace": "https://example.com", + "contact_details": "mailto:security@example.com" + }, + "role": "csaf_trusted_provider" +} diff --git a/testdata/simple-directory-provider/security.txt b/testdata/simple-directory-provider/security.txt new file mode 100644 index 0000000..0ae943d --- /dev/null +++ b/testdata/simple-directory-provider/security.txt @@ -0,0 +1,2 @@ +CSAF: /provider-metadata.json + diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json b/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + 
"namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": 
["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json b/testdata/simple-directory-provider/white/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + 
"category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git 
a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc new file mode 100644 index 0000000..9dff47b --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc @@ -0,0 +1,7 @@ +-----BEGIN PGP SIGNATURE----- + +iHUEABYKAB0WIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZukv9QAKCRD7PNmxXeYV +ljq0AP9n/rTgoNCJzSTZzNrrMy28ZR+Ppp1MSPWGFUzsx6qLJgD/d8cu0lokMsXf +y0uc9k7hrla/ajFUzNt3AVvT+CPFtAo= +=7E66 +-----END PGP SIGNATURE----- diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 new file mode 100644 index 0000000..851b27c --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 @@ -0,0 +1 @@ +cb263bf1beab18b893de63f2966d0d8c5f38d60101c24d3fd7a5feebaad02c3b avendor-advisory-0004.json diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 new file mode 100644 index 0000000..6703550 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 @@ -0,0 +1 @@ +39476e1d08a0871d166091c90de259544382a3599eebda118a93468499a30fd034286086c461a97d3d5298e093b0be3868e8d89d8a6a255c4aa6adb81ebbfcad avendor-advisory-0004.json diff --git a/testdata/simple-directory-provider/white/changes.csv b/testdata/simple-directory-provider/white/changes.csv new file mode 100644 index 0000000..4acdb29 --- /dev/null +++ b/testdata/simple-directory-provider/white/changes.csv @@ -0,0 +1 @@ +"avendor-advisory-0004.json","2020-01-01T00:00:00+00:00" diff --git a/testdata/simple-directory-provider/white/index.html b/testdata/simple-directory-provider/white/index.html new file mode 100644 index 0000000..bcfabd9 --- /dev/null +++ b/testdata/simple-directory-provider/white/index.html @@ -0,0 +1,6 @@ + + + + 
avendor-advisory-0004 + + diff --git a/testdata/simple-directory-provider/white/index.txt b/testdata/simple-directory-provider/white/index.txt new file mode 100644 index 0000000..d19d30f --- /dev/null +++ b/testdata/simple-directory-provider/white/index.txt @@ -0,0 +1 @@ +avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/openpgp/info.txt b/testdata/simple-rolie-provider/openpgp/info.txt new file mode 100644 index 0000000..3a159f6 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/info.txt @@ -0,0 +1,2 @@ +The GPG key was generated with no passphrase and this command: +`gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --quick-generate-key security@example.com"` diff --git a/testdata/simple-rolie-provider/openpgp/privkey.asc b/testdata/simple-rolie-provider/openpgp/privkey.asc new file mode 100644 index 0000000..816f309 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/privkey.asc @@ -0,0 +1,15 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- + +lFgEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr4AAQD5G5xy/yTN5b+lvV5Ahrbz1qOZ/wmKTieGOH9GZb6JwhHwtBRzZWN1 +cml0eUBleGFtcGxlLmNvbYiZBBMWCgBBFiEEqJFMovEROcammgAY+zzZsV3mFZYF +AmbbEDcCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ+zzZ +sV3mFZZskQEAg5Dttqm6TA7MtLxz7VSlklx95LQr9d5jm4jcOaqlGT0A/1mAAlUq +SDySFGI6DFQLcaZaUd9Yl+1b0Icr0tUiOaQHnF0EZtsQNxIKKwYBBAGXVQEFAQEH +QOTHP4FkopIGJMWXTYsaeQ1Dugd+yNYWB357vRYq6QsiAwEIBwAA/0RIazq1s8Oe +23jvNaZGb/adDYnRrkCMXXTBKsuA6WOAEhKIeAQYFgoAIBYhBKiRTKLxETnGppoA +GPs82bFd5hWWBQJm2xA3AhsMAAoJEPs82bFd5hWWDKABAOl+NoM6FBhKAvckUXDR +MLZ4k778N4Vy9VHbectjRKj1AQCO3JOmON+U6/mjohXrc2bwzKzt2yGiLP2HMxDx +uzMXBQ== +=4XHC +-----END PGP PRIVATE KEY BLOCK----- diff --git a/testdata/simple-rolie-provider/openpgp/pubkey.asc b/testdata/simple-rolie-provider/openpgp/pubkey.asc new file mode 100644 index 0000000..88cb720 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/pubkey.asc @@ -0,0 +1,13 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + 
+mDMEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr60FHNlY3VyaXR5QGV4YW1wbGUuY29tiJkEExYKAEEWIQSokUyi8RE5xqaa +ABj7PNmxXeYVlgUCZtsQNwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIe +BwIXgAAKCRD7PNmxXeYVlmyRAQCDkO22qbpMDsy0vHPtVKWSXH3ktCv13mObiNw5 +qqUZPQD/WYACVSpIPJIUYjoMVAtxplpR31iX7VvQhyvS1SI5pAe4OARm2xA3Egor +BgEEAZdVAQUBAQdA5Mc/gWSikgYkxZdNixp5DUO6B37I1hYHfnu9FirpCyIDAQgH +iHgEGBYKACAWIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZtsQNwIbDAAKCRD7PNmx +XeYVlgygAQDpfjaDOhQYSgL3JFFw0TC2eJO+/DeFcvVR23nLY0So9QEAjtyTpjjf +lOv5o6IV63Nm8Mys7dshoiz9hzMQ8bszFwU= +=rhGT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/testdata/simple-rolie-provider/provider-metadata.json b/testdata/simple-rolie-provider/provider-metadata.json new file mode 100644 index 0000000..7abb316 --- /dev/null +++ b/testdata/simple-rolie-provider/provider-metadata.json @@ -0,0 +1,33 @@ +{ + "canonical_url": "{{.URL}}/provider-metadata.json", + "distributions": [ + { + "rolie": { + "feeds": [ + { + "summary": "TLP:WHITE advisories", + "tlp_label": "WHITE", + "url": "{{.URL}}/white/white-feed.json" + } + ] + } + } + ], + "last_updated": "2020-01-01T00:00:00Z", + "list_on_CSAF_aggregators": true, + "metadata_version": "2.0", + "mirror_on_CSAF_aggregators": true, + "public_openpgp_keys": [ + { + "fingerprint": "A8914CA2F11139C6A69A0018FB3CD9B15DE61596", + "url": "{{.URL}}/openpgp/pubkey.asc" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc", + "namespace": "https://example.com", + "contact_details": "mailto:security@example.com" + }, + "role": "csaf_trusted_provider" +} diff --git a/testdata/simple-rolie-provider/security.txt b/testdata/simple-rolie-provider/security.txt new file mode 100644 index 0000000..0ae943d --- /dev/null +++ b/testdata/simple-rolie-provider/security.txt @@ -0,0 +1,2 @@ +CSAF: /provider-metadata.json + diff --git a/testdata/simple-rolie-provider/service.json b/testdata/simple-rolie-provider/service.json new file mode 100644 index 0000000..500d882 --- 
/dev/null +++ b/testdata/simple-rolie-provider/service.json @@ -0,0 +1,23 @@ +{ + "service": { + "workspace": [ + { + "title": "CSAF feeds", + "collection": [ + { + "title": "CSAF feed (TLP:WHITE)", + "href": "/white/white-feed.json", + "categories": { + "category": [ + { + "scheme": "urn:ietf:params:rolie:category:information-type", + "term": "csaf" + } + ] + } + } + ] + } + ] + } +} diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": 
"CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc new file mode 100644 index 0000000..9dff47b --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc @@ -0,0 +1,7 @@ +-----BEGIN PGP 
SIGNATURE----- + +iHUEABYKAB0WIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZukv9QAKCRD7PNmxXeYV +ljq0AP9n/rTgoNCJzSTZzNrrMy28ZR+Ppp1MSPWGFUzsx6qLJgD/d8cu0lokMsXf +y0uc9k7hrla/ajFUzNt3AVvT+CPFtAo= +=7E66 +-----END PGP SIGNATURE----- diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 new file mode 100644 index 0000000..851b27c --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 @@ -0,0 +1 @@ +cb263bf1beab18b893de63f2966d0d8c5f38d60101c24d3fd7a5feebaad02c3b avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 new file mode 100644 index 0000000..6703550 --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 @@ -0,0 +1 @@ +39476e1d08a0871d166091c90de259544382a3599eebda118a93468499a30fd034286086c461a97d3d5298e093b0be3868e8d89d8a6a255c4aa6adb81ebbfcad avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/white/white-feed.json b/testdata/simple-rolie-provider/white/white-feed.json new file mode 100644 index 0000000..1bc17bc --- /dev/null +++ b/testdata/simple-rolie-provider/white/white-feed.json @@ -0,0 +1,61 @@ +{ + "feed": { + "id": "csaf-feed-tlp-white", + "title": "CSAF feed (TLP:WHITE)", + "link": [ + { + "rel": "self", + "href": "/white/csaf-feed-tlp-white.json" + }, + { + "rel": "service", + "href": "/service.json" + } + ], + "category": [ + { + "scheme": "urn:ietf:params:rolie:category:information-type", + "term": "csaf" + } + ], + "updated": "2020-01-01T00:00:00Z", + "entry": [ + { + "id": "Avendor-advisory-0004", + "title": "Test CSAF document", + "link": [ + { + "rel": "self", + "href": "/white/avendor-advisory-0004.json" + }, + {{if .EnableSha256}} + { + "rel": "hash", + "href": "/white/avendor-advisory-0004.json.sha256" + }, + {{end}} + {{if 
.EnableSha512}} + { + "rel": "hash", + "href": "/white/avendor-advisory-0004.json.sha512" + }, + {{end}} + { + "rel": "signature", + "href": "/white/avendor-advisory-0004.json.asc" + } + ], + "published": "2020-01-01T00:00:00Z", + "updated": "2020-01-01T00:00:00Z", + "content": { + "type": "application/json", + "src": "/avendor-advisory-0004.json" + }, + "format": { + "schema": "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", + "version": "2.0" + } + } + ] + } +} From 56509bbb4d868454d01e3b7ce9dffd8bdb658e58 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 27 Nov 2024 12:51:38 +0100 Subject: [PATCH 069/117] Use new path in tests --- cmd/csaf_aggregator/client_test.go | 2 +- cmd/csaf_checker/links_test.go | 2 +- cmd/csaf_downloader/downloader_test.go | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go index c08b29a..fc5b095 100644 --- a/cmd/csaf_aggregator/client_test.go +++ b/cmd/csaf_aggregator/client_test.go @@ -14,7 +14,7 @@ import ( "net/http/httptest" "testing" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func Test_downloadJSON(t *testing.T) { diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index aa04222..6baccf8 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -15,7 +15,7 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) const page0 = ` diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index cf02035..1ae1524 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -19,8 +19,8 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/options" + 
"github.com/gocsaf/csaf/v3/util" ) type ProviderParams struct { From b8a98033bf3721bdec6a055dfb07873e2306e512 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 28 Nov 2024 15:58:20 +0100 Subject: [PATCH 070/117] fix docs link to standard --- docs/proxy-provider-for-aggregator.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/proxy-provider-for-aggregator.md b/docs/proxy-provider-for-aggregator.md index f34d714..4148f52 100644 --- a/docs/proxy-provider-for-aggregator.md +++ b/docs/proxy-provider-for-aggregator.md @@ -5,7 +5,9 @@ calls it a *CSAF publisher*. After manually downloading the advisories from such a publisher, the tools here can be used to offer the CSAF files for automated downloading -as *CSAF aggregator*. (The construct is called *CSAF proxy provider*. See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#725-role-csaf-aggregator) for more details.) +as *CSAF aggregator*. (The construct is called *CSAF proxy provider*. +See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html#725-role-csaf-aggregator) +for more details.) There are three necessary steps, easiest is to use one single virtual maschine (or container) per internal provider. From 1daaed2c516d3fd674eb99c39dfc5f87ba43f78a Mon Sep 17 00:00:00 2001 From: ncsc-ie-devs <112564016+ncsc-ie-devs@users.noreply.github.com> Date: Mon, 2 Dec 2024 10:42:54 +0000 Subject: [PATCH 071/117] ensure HTTP requests use proxy env vars (#597) * fix: ensure HTTP requests use proxy env vars Updated all instances of `http.Transport` to include the `Proxy` field set to `http.ProxyFromEnvironment`. This ensures that the application respects proxy configuration defined by the `HTTP_PROXY`, `HTTPS_PROXY`, and `NO_PROXY` environment variables. 
### Changes: - Modified `http.Transport` initialization across the codebase to use: ```go Proxy: http.ProxyFromEnvironment ``` - Ensured TLS configurations remain intact by preserving `TLSClientConfig`. ### Why: - Previously, HTTP requests bypassed proxy settings due to missing configuration in the transport layer. - This fix enables compatibility with proxied environments, aligning with standard Go behavior. ### Impact: - All HTTP and HTTPS traffic now adheres to proxy settings. - Domains listed in `NO_PROXY` bypass the proxy as expected. ### Verification: - Tested with proxy environment variables set (`HTTP_PROXY`, `HTTPS_PROXY`). - Verified requests route through the proxy and `NO_PROXY` works as intended. * reformat with fmt --------- Co-authored-by: Cormac Doherty --- cmd/csaf_aggregator/config.go | 1 + cmd/csaf_checker/processor.go | 2 ++ cmd/csaf_downloader/downloader.go | 1 + cmd/csaf_downloader/forwarder.go | 1 + cmd/csaf_uploader/processor.go | 1 + 5 files changed, 6 insertions(+) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 81db0b7..3c2c46b 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -284,6 +284,7 @@ func (c *config) httpClient(p *provider) util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 5fd3fbd..5d1b69b 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -430,6 +430,7 @@ func (p *processor) fullClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) @@ -460,6 +461,7 @@ func (p *processor) basicClient() *http.Client { if p.cfg.Insecure { tr := &http.Transport{ TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + Proxy: http.ProxyFromEnvironment, } return 
&http.Client{Transport: tr} } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index f21fcc0..b7e7342 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -121,6 +121,7 @@ func (d *downloader) httpClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 12d9fe4..1598283 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -106,6 +106,7 @@ func (f *forwarder) httpClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index b57cafb..f655e02 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -51,6 +51,7 @@ func (p *processor) httpClient() *http.Client { client.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } return &client From 57953e495f10c26312a05eec3d1e7acb2a40e363 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 13:23:57 +0100 Subject: [PATCH 072/117] Warn if no remote validator was specified --- cmd/csaf_validator/main.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index b07c2f4..6985509 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -69,6 +69,8 @@ func run(opts *options, files []string) error { "preparing remote validator failed: %w", err) } defer validator.Close() + } else { + log.Printf("warn: no remote validator specified") } // Select amount level of output for remote validation. 
From 938ceb872ac4b5460379c86b89b6ca0db6ed72f2 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 13:53:56 +0100 Subject: [PATCH 073/117] Return exit code based on validation result --- cmd/csaf_validator/main.go | 13 +++++++++++++ docs/csaf_validator.md | 7 +++++++ 2 files changed, 20 insertions(+) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 6985509..4a9e827 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -22,6 +22,13 @@ import ( "github.com/gocsaf/csaf/v3/util" ) +const ( + exitCodeAllValid = 0 + exitCodeSchemaInvalid = 1 << 0 + exitCodeNoRemoteValidator = 1 << 1 + exitCodeFailedRemoteValidation = 1 << 2 +) + type options struct { Version bool `long:"version" description:"Display version of the binary"` RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL"` @@ -53,6 +60,7 @@ func main() { // run validates the given files. func run(opts *options, files []string) error { + exitCode := exitCodeAllValid var validator csaf.RemoteValidator eval := util.NewPathEval() @@ -70,6 +78,7 @@ func run(opts *options, files []string) error { } defer validator.Close() } else { + exitCode |= exitCodeNoRemoteValidator log.Printf("warn: no remote validator specified") } @@ -106,6 +115,7 @@ func run(opts *options, files []string) error { } if len(validationErrs) > 0 { + exitCode |= exitCodeSchemaInvalid fmt.Printf("schema validation errors of %q\n", file) for _, vErr := range validationErrs { fmt.Printf(" * %s\n", vErr) @@ -132,12 +142,15 @@ func run(opts *options, files []string) error { if rvr.Valid { passes = "passes" } else { + exitCode |= exitCodeFailedRemoteValidation passes = "does not pass" } fmt.Printf("%q %s remote validation.\n", file, passes) } } + // Exit code is based on validation results + os.Exit(exitCodeAllValid) return nil } diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index dfa0c9a..74dbaaf 100644 --- a/docs/csaf_validator.md +++ 
b/docs/csaf_validator.md @@ -2,6 +2,13 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. +### Exit codes +If no fatal error occurs the program will exit with the following codes: +- `0`: all valid +- `2⁰`: schema invalid +- `2¹`: no remote validator configured +- `2²`: failure in remote validation + ### Usage ``` From 16e86051c5d1b0912a179eb2b30ba568da4e81ce Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 4 Dec 2024 14:27:24 +0100 Subject: [PATCH 074/117] Be more precise about exit codes. --- cmd/csaf_validator/main.go | 8 ++++---- docs/csaf_validator.md | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 4a9e827..9e844b7 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -23,10 +23,10 @@ import ( ) const ( - exitCodeAllValid = 0 - exitCodeSchemaInvalid = 1 << 0 - exitCodeNoRemoteValidator = 1 << 1 - exitCodeFailedRemoteValidation = 1 << 2 + exitCodeSchemaInvalid = 1 << iota + exitCodeNoRemoteValidator + exitCodeFailedRemoteValidation + exitCodeAllValid = 0 ) type options struct { diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 74dbaaf..64ded6d 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -3,11 +3,11 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. 
### Exit codes -If no fatal error occurs the program will exit with the following codes: -- `0`: all valid -- `2⁰`: schema invalid -- `2¹`: no remote validator configured -- `2²`: failure in remote validation +If no fatal error occurs the program will exit with an exit code `n` with the following conditions: +- `n == 0`: all valid +- `(n / 1) % 2 == 1`: schema validation failed +- `(n / 2) % 2 == 1`: no remote validator configured +- `(n / 4) % 2 == 1`: failure in remote validation ### Usage From a51964be3f6a9360ed0c4e05ccc5bcc8418d0f7e Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 16:02:03 +0100 Subject: [PATCH 075/117] Add initial csaf_checker provider test --- cmd/csaf_checker/processor_test.go | 103 +++++++++++++++++++++ cmd/csaf_downloader/downloader_test.go | 62 +-------------- internal/testutil/testutil.go | 73 ++++++++++++++++++ 3 files changed, 179 insertions(+), 59 deletions(-) create mode 100644 cmd/csaf_checker/processor_test.go create mode 100644 internal/testutil/testutil.go diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go new file mode 100644 index 0000000..b8b1b1f --- /dev/null +++ b/cmd/csaf_checker/processor_test.go @@ -0,0 +1,103 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package main + +import ( + "net/http/httptest" + "testing" + + "github.com/gocsaf/csaf/v3/internal/testutil" + "github.com/gocsaf/csaf/v3/util" +) + +func TestShaMarking(t *testing.T) { + tests := []struct { + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + }{ + { + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + }, + { + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, + }, + { + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, + }, + { + name: "only deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, + }, + { + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, + }, + { + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + params := testutil.ProviderParams{ + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + } + server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + cfg := config{} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + p, err := newProcessor(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + p.client = client + + // TODO check result of 
processor + _, err = p.run([]string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) + } + p.close() + }) + } +} diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index 1ae1524..d7eaae3 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -11,72 +11,16 @@ package main import ( "context" "errors" - "html/template" "log/slog" - "net/http" "net/http/httptest" "os" - "strings" "testing" "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) -type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool -} - -func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - path := "../../testdata/" - if directoryProvider { - path += "simple-directory-provider" - } else { - path += "simple-rolie-provider" - } - - path += r.URL.Path - - if strings.HasSuffix(r.URL.Path, "/") { - path += "index.html" - } - - content, err := os.ReadFile(path) - if err != nil { - w.WriteHeader(http.StatusNotFound) - return - } - switch { - case strings.HasSuffix(path, ".html"): - w.Header().Add("Content-Type", "text/html") - case strings.HasSuffix(path, ".json"): - w.Header().Add("Content-Type", "application/json") - case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: - w.WriteHeader(http.StatusNotFound) - return - case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: - w.WriteHeader(http.StatusNotFound) - return - default: - w.Header().Add("Content-Type", "text/plain") - } - - tmplt, err := template.New("base").Parse(string(content)) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - return - } - err = tmplt.Execute(w, params) - if err != nil { - 
w.WriteHeader(http.StatusInternalServerError) - return - } - }) -} - func checkIfFileExists(path string, t *testing.T) bool { if _, err := os.Stat(path); err == nil { return true @@ -169,12 +113,12 @@ func TestShaMarking(t *testing.T) { t.Run(test.name, func(tt *testing.T) { tt.Parallel() serverURL := "" - params := ProviderParams{ + params := testutil.ProviderParams{ URL: "", EnableSha256: test.enableSha256, EnableSha512: test.enableSha512, } - server := httptest.NewTLSServer(ProviderHandler(¶ms, test.directoryProvider)) + server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() serverURL = server.URL diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go new file mode 100644 index 0000000..455d217 --- /dev/null +++ b/internal/testutil/testutil.go @@ -0,0 +1,73 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +// Package testutil contains shared helper functions for testing the application. +package testutil + +import ( + "html/template" + "net/http" + "os" + "strings" +) + +// ProviderParams configures the test provider. +type ProviderParams struct { + URL string + EnableSha256 bool + EnableSha512 bool +} + +// ProviderHandler returns a test provider handler with the specified configuration. 
+func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + path := "../../testdata/" + if directoryProvider { + path += "simple-directory-provider" + } else { + path += "simple-rolie-provider" + } + + path += r.URL.Path + + if strings.HasSuffix(r.URL.Path, "/") { + path += "index.html" + } + + content, err := os.ReadFile(path) + if err != nil { + w.WriteHeader(http.StatusNotFound) + return + } + switch { + case strings.HasSuffix(path, ".html"): + w.Header().Add("Content-Type", "text/html") + case strings.HasSuffix(path, ".json"): + w.Header().Add("Content-Type", "application/json") + case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: + w.WriteHeader(http.StatusNotFound) + return + case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: + w.WriteHeader(http.StatusNotFound) + return + default: + w.Header().Add("Content-Type", "text/plain") + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + err = tmplt.Execute(w, params) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + } +} From 5b6af7a4ad26bb53795e94fe3576a636b0b81df1 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 17:52:00 +0100 Subject: [PATCH 076/117] WIP: Add requirement tests --- cmd/csaf_checker/processor_test.go | 106 +++++++++++++++++- testdata/simple-rolie-provider/service.json | 2 +- .../white/white-feed.json | 14 +-- 3 files changed, 112 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index b8b1b1f..73574bd 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -9,55 +9,150 @@ package main import ( + "fmt" "net/http/httptest" + "reflect" "testing" "github.com/gocsaf/csaf/v3/internal/testutil" 
"github.com/gocsaf/csaf/v3/util" ) +func getBaseRequirements(url string) []Requirement { + return []Requirement{ + { + Num: 1, + Description: "Valid CSAF documents", + Messages: []Message{{Type: 1, Text: "No remote validator configured"}, {Type: 0, Text: "All advisories validated fine against the schema."}}, + }, { + Num: 2, + Description: "Filename", + Messages: []Message{{Type: 0, Text: "All found filenames are conforming."}}}, + { + Num: 3, + Description: "TLS", + Messages: []Message{{Type: 0, Text: "All tested URLs were HTTPS."}}}, + { + Num: 4, + Description: "TLP:WHITE", + Messages: []Message{{Type: 0, Text: "All advisories labeled TLP:WHITE were freely accessible."}}}, + { + Num: 5, + Description: "TLP:AMBER and TLP:RED", + Messages: []Message{ + {Type: 0, Text: "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility."}}}, + { + Num: 6, + Description: "Redirects", + Messages: []Message{{Type: 0, Text: "No redirections found."}}}, + { + Num: 7, + Description: "provider-metadata.json", + Messages: []Message{{Type: 0, Text: "Found good provider metadata."}}}, + { + Num: 8, + Description: "security.txt", + Messages: []Message{{Type: 0, Text: "Performed no test of security.txt since the direct url of the provider-metadata.json was used."}}}, + { + Num: 9, + Description: "/.well-known/csaf/provider-metadata.json", + Messages: []Message{{Type: 0, Text: "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used."}}}, + { + Num: 10, + Description: "DNS path", + Messages: []Message{{Type: 0, Text: "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used."}}}, + { + Num: 11, + Description: "One folder per year", + Messages: []Message{{Type: 2, Text: fmt.Sprintf("No year folder found in %s/white/avendor-advisory-0004.json", url)}}}, + { + Num: 12, + Description: "index.txt", + Messages: 
[]Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/index.txt", url)}}}, + { + Num: 13, + Description: "changes.csv", + Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/changes.csv", url)}}}, + { + Num: 14, + Description: "Directory listings", + Messages: []Message{{Type: 0, Text: "All directory listings are valid."}}}, + { + Num: 15, + Description: "ROLIE feed", + Messages: []Message{{Type: 2, Text: "ROLIE feed based distribution was not used."}}}, + { + Num: 16, + Description: "ROLIE service document", + Messages: []Message{{Type: 1, Text: "No ROLIE service document found."}}}, + { + Num: 17, + Description: "ROLIE category document", + Messages: []Message{{Type: 1, Text: "No ROLIE category document found."}}}, + { + Num: 18, + Description: "Integrity", + Messages: []Message{{Type: 0, Text: "All checksums match."}}}, + { + Num: 19, + Description: "Signatures", + Messages: []Message{{Type: 0, Text: "All signatures verified."}}}, + { + Num: 20, + Description: "Public OpenPGP Key", + Messages: []Message{{Type: 0, Text: "1 public OpenPGP key(s) loaded."}}}, + } +} + func TestShaMarking(t *testing.T) { tests := []struct { name string directoryProvider bool enableSha256 bool enableSha512 bool + expected func(string) []Requirement }{ { name: "deliver sha256 and sha512", directoryProvider: false, enableSha256: true, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256", directoryProvider: false, enableSha256: true, enableSha512: false, + expected: getBaseRequirements, }, { name: "only deliver sha512", directoryProvider: false, enableSha256: false, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256 and sha512, directory provider", directoryProvider: true, enableSha256: true, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256, directory provider", directoryProvider: true, enableSha256: true, enableSha512: false, + expected: getBaseRequirements, }, { name: 
"only deliver sha512, directory provider", directoryProvider: true, enableSha256: false, enableSha512: true, + expected: getBaseRequirements, }, } @@ -92,11 +187,18 @@ func TestShaMarking(t *testing.T) { } p.client = client - // TODO check result of processor - _, err = p.run([]string{serverURL + "/provider-metadata.json"}) + report, err := p.run([]string{serverURL + "/provider-metadata.json"}) if err != nil { t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) } + expected := test.expected(serverURL) + for i, got := range report.Domains[0].Requirements { + want := expected[i] + if !reflect.DeepEqual(*got, want) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, want, *got) + } + } + p.close() }) } diff --git a/testdata/simple-rolie-provider/service.json b/testdata/simple-rolie-provider/service.json index 500d882..a398a40 100644 --- a/testdata/simple-rolie-provider/service.json +++ b/testdata/simple-rolie-provider/service.json @@ -6,7 +6,7 @@ "collection": [ { "title": "CSAF feed (TLP:WHITE)", - "href": "/white/white-feed.json", + "href": "{{.URL}}/white/white-feed.json", "categories": { "category": [ { diff --git a/testdata/simple-rolie-provider/white/white-feed.json b/testdata/simple-rolie-provider/white/white-feed.json index 1bc17bc..923a492 100644 --- a/testdata/simple-rolie-provider/white/white-feed.json +++ b/testdata/simple-rolie-provider/white/white-feed.json @@ -5,11 +5,11 @@ "link": [ { "rel": "self", - "href": "/white/csaf-feed-tlp-white.json" + "href": "{{.URL}}/white/csaf-feed-tlp-white.json" }, { "rel": "service", - "href": "/service.json" + "href": "{{.URL}}/service.json" } ], "category": [ @@ -26,30 +26,30 @@ "link": [ { "rel": "self", - "href": "/white/avendor-advisory-0004.json" + "href": "{{.URL}}/white/avendor-advisory-0004.json" }, {{if .EnableSha256}} { "rel": "hash", - "href": "/white/avendor-advisory-0004.json.sha256" + "href": "{{.URL}}/white/avendor-advisory-0004.json.sha256" }, {{end}} {{if .EnableSha512}} { 
"rel": "hash", - "href": "/white/avendor-advisory-0004.json.sha512" + "href": "{{.URL}}/white/avendor-advisory-0004.json.sha512" }, {{end}} { "rel": "signature", - "href": "/white/avendor-advisory-0004.json.asc" + "href": "{{.URL}}/white/avendor-advisory-0004.json.asc" } ], "published": "2020-01-01T00:00:00Z", "updated": "2020-01-01T00:00:00Z", "content": { "type": "application/json", - "src": "/avendor-advisory-0004.json" + "src": "{{.URL}}/avendor-advisory-0004.json" }, "format": { "schema": "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", From 68bd04676cc425dca87751bca989457baf5f56a1 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Dec 2024 13:11:07 +0100 Subject: [PATCH 077/117] Add requirement checker test data --- cmd/csaf_checker/processor_test.go | 147 +++++------- .../sha256-directory.json | 206 +++++++++++++++++ .../processor-requirements/sha256-rolie.json | 210 ++++++++++++++++++ .../sha256-sha512-directory.json | 206 +++++++++++++++++ .../sha256-sha512-rolie.json | 210 ++++++++++++++++++ .../sha512-directory.json | 207 +++++++++++++++++ .../processor-requirements/sha512-rolie.json | 210 ++++++++++++++++++ 7 files changed, 1299 insertions(+), 97 deletions(-) create mode 100644 testdata/processor-requirements/sha256-directory.json create mode 100644 testdata/processor-requirements/sha256-rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-directory.json create mode 100644 testdata/processor-requirements/sha256-sha512-rolie.json create mode 100644 testdata/processor-requirements/sha512-directory.json create mode 100644 testdata/processor-requirements/sha512-rolie.json diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 73574bd..c4fb532 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -9,99 +9,54 @@ package main import ( - "fmt" + "bytes" + "encoding/json" "net/http/httptest" + "os" "reflect" "testing" + "text/template" 
"github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) -func getBaseRequirements(url string) []Requirement { - return []Requirement{ - { - Num: 1, - Description: "Valid CSAF documents", - Messages: []Message{{Type: 1, Text: "No remote validator configured"}, {Type: 0, Text: "All advisories validated fine against the schema."}}, - }, { - Num: 2, - Description: "Filename", - Messages: []Message{{Type: 0, Text: "All found filenames are conforming."}}}, - { - Num: 3, - Description: "TLS", - Messages: []Message{{Type: 0, Text: "All tested URLs were HTTPS."}}}, - { - Num: 4, - Description: "TLP:WHITE", - Messages: []Message{{Type: 0, Text: "All advisories labeled TLP:WHITE were freely accessible."}}}, - { - Num: 5, - Description: "TLP:AMBER and TLP:RED", - Messages: []Message{ - {Type: 0, Text: "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility."}}}, - { - Num: 6, - Description: "Redirects", - Messages: []Message{{Type: 0, Text: "No redirections found."}}}, - { - Num: 7, - Description: "provider-metadata.json", - Messages: []Message{{Type: 0, Text: "Found good provider metadata."}}}, - { - Num: 8, - Description: "security.txt", - Messages: []Message{{Type: 0, Text: "Performed no test of security.txt since the direct url of the provider-metadata.json was used."}}}, - { - Num: 9, - Description: "/.well-known/csaf/provider-metadata.json", - Messages: []Message{{Type: 0, Text: "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used."}}}, - { - Num: 10, - Description: "DNS path", - Messages: []Message{{Type: 0, Text: "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used."}}}, - { - Num: 11, - Description: "One folder per year", - Messages: []Message{{Type: 2, Text: fmt.Sprintf("No year folder found in %s/white/avendor-advisory-0004.json", url)}}}, - { - 
Num: 12, - Description: "index.txt", - Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/index.txt", url)}}}, - { - Num: 13, - Description: "changes.csv", - Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/changes.csv", url)}}}, - { - Num: 14, - Description: "Directory listings", - Messages: []Message{{Type: 0, Text: "All directory listings are valid."}}}, - { - Num: 15, - Description: "ROLIE feed", - Messages: []Message{{Type: 2, Text: "ROLIE feed based distribution was not used."}}}, - { - Num: 16, - Description: "ROLIE service document", - Messages: []Message{{Type: 1, Text: "No ROLIE service document found."}}}, - { - Num: 17, - Description: "ROLIE category document", - Messages: []Message{{Type: 1, Text: "No ROLIE category document found."}}}, - { - Num: 18, - Description: "Integrity", - Messages: []Message{{Type: 0, Text: "All checksums match."}}}, - { - Num: 19, - Description: "Signatures", - Messages: []Message{{Type: 0, Text: "All signatures verified."}}}, - { - Num: 20, - Description: "Public OpenPGP Key", - Messages: []Message{{Type: 0, Text: "1 public OpenPGP key(s) loaded."}}}, +func getRequirementTestData(t *testing.T, params testutil.ProviderParams, directoryProvider bool) []Requirement { + path := "../../testdata/processor-requirements/" + if params.EnableSha256 { + path += "sha256-" } + if params.EnableSha512 { + path += "sha512-" + } + if directoryProvider { + path += "directory" + } else { + path += "rolie" + } + path += ".json" + + content, err := os.ReadFile(path) + if err != nil { + t.Fatal(err) + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + t.Fatal(err) + } + + var output bytes.Buffer + err = tmplt.Execute(&output, params) + if err != nil { + t.Fatal(err) + } + var requirement []Requirement + err = json.Unmarshal(output.Bytes(), &requirement) + if err != nil { + t.Fatal(err) + } + return requirement } func TestShaMarking(t *testing.T) { @@ -110,49 +65,42 @@ func 
TestShaMarking(t *testing.T) { directoryProvider bool enableSha256 bool enableSha512 bool - expected func(string) []Requirement }{ { name: "deliver sha256 and sha512", directoryProvider: false, enableSha256: true, enableSha512: true, - expected: getBaseRequirements, }, { name: "only deliver sha256", directoryProvider: false, enableSha256: true, enableSha512: false, - expected: getBaseRequirements, }, { name: "only deliver sha512", directoryProvider: false, enableSha256: false, enableSha512: true, - expected: getBaseRequirements, }, { - name: "only deliver sha256 and sha512, directory provider", + name: "deliver sha256 and sha512, directory provider", directoryProvider: true, enableSha256: true, enableSha512: true, - expected: getBaseRequirements, }, { name: "only deliver sha256, directory provider", directoryProvider: true, enableSha256: true, enableSha512: false, - expected: getBaseRequirements, }, { name: "only deliver sha512, directory provider", directoryProvider: true, enableSha256: false, enableSha512: true, - expected: getBaseRequirements, }, } @@ -191,11 +139,16 @@ func TestShaMarking(t *testing.T) { if err != nil { t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) } - expected := test.expected(serverURL) - for i, got := range report.Domains[0].Requirements { - want := expected[i] - if !reflect.DeepEqual(*got, want) { - t.Errorf("SHA marking %v: Expected %v, got %v", test.name, want, *got) + expected := getRequirementTestData(t, + testutil.ProviderParams{ + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + }, + test.directoryProvider) + for i, want := range report.Domains[0].Requirements { + if !reflect.DeepEqual(expected[i], *want) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, *want, expected[i]) } } diff --git a/testdata/processor-requirements/sha256-directory.json b/testdata/processor-requirements/sha256-directory.json new file mode 100644 index 0000000..a106977 --- /dev/null +++ 
b/testdata/processor-requirements/sha256-directory.json @@ -0,0 +1,206 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] \ No newline at end of file diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha256-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-directory.json b/testdata/processor-requirements/sha256-sha512-directory.json new file mode 100644 index 0000000..3e30b9a --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-directory.json @@ -0,0 +1,206 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] diff --git a/testdata/processor-requirements/sha512-directory.json b/testdata/processor-requirements/sha512-directory.json new file mode 100644 index 0000000..e47e1f9 --- /dev/null +++ b/testdata/processor-requirements/sha512-directory.json @@ -0,0 +1,207 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] + diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] From df65ad13cbd222d2a2b1784287bd9e2e8b22ba7b Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 10 Dec 2024 10:13:42 +0100 Subject: [PATCH 078/117] Fix: return correct exit code --- cmd/csaf_validator/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 9e844b7..1a34be0 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -150,7 +150,7 @@ func run(opts *options, files []string) error { } // Exit code is based on validation results - os.Exit(exitCodeAllValid) + os.Exit(exitCode) return nil } From fc404e499c90ead7643bbbbba4b75855bdbfe938 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 13 Dec 2024 13:33:22 +0100 Subject: [PATCH 079/117] Unfix: Add should-states --- testdata/processor-requirements/sha256-directory.json | 2 +- testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-directory.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/testdata/processor-requirements/sha256-directory.json b/testdata/processor-requirements/sha256-directory.json index a106977..46b4049 100644 --- a/testdata/processor-requirements/sha256-directory.json +++ b/testdata/processor-requirements/sha256-directory.json @@ -178,7 +178,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 0, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 5875174..61c5ccf 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 1, + "type": 0, "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-directory.json b/testdata/processor-requirements/sha512-directory.json index e47e1f9..5102fab 100644 --- a/testdata/processor-requirements/sha512-directory.json +++ b/testdata/processor-requirements/sha512-directory.json @@ -178,7 +178,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 0, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 5875174..61c5ccf 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 1, + "type": 0, "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" } ] From a3d6d6acfb3fed53967ae8c024ddc2b565bd284b Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 14:26:00 +0100 Subject: [PATCH 080/117] Downgrade error to info in directory hash fetching --- cmd/csaf_checker/processor.go | 9 +++++++-- cmd/csaf_checker/processor_test.go | 6 +++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7972e2b..eed561a 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -776,8 +776,13 @@ func (p *processor) integrity( continue } if res.StatusCode != http.StatusOK { - p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) + if f.IsDirectory() { + p.badIntegrities.info("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status) + } else { + p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status) + } continue } h, err := func() ([]byte, error) { 
diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index c4fb532..ea5aed5 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -146,9 +146,9 @@ func TestShaMarking(t *testing.T) { EnableSha512: test.enableSha512, }, test.directoryProvider) - for i, want := range report.Domains[0].Requirements { - if !reflect.DeepEqual(expected[i], *want) { - t.Errorf("SHA marking %v: Expected %v, got %v", test.name, *want, expected[i]) + for i, got := range report.Domains[0].Requirements { + if !reflect.DeepEqual(expected[i], *got) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, expected[i], *got) } } From ebd96011fcfd38a6a6c8c82ab2a9e99d8aee3f8c Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 14:38:49 +0100 Subject: [PATCH 081/117] Revert new requirement 17 test Changing the ROLIE category fetching warning to info can be addressed later. --- testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 61c5ccf..5875174 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 0, + "type": 1, "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 61c5ccf..5875174 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 0, + "type": 1, "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" } ] From 9dd4b7fc8dca06e7eb87e54da60680fd4f8a6b41 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 15:54:39 +0100 Subject: [PATCH 082/117] Add tests for no hash given or available --- cmd/csaf_checker/processor.go | 10 +- cmd/csaf_checker/processor_test.go | 101 ++++++--- internal/testutil/testutil.go | 10 +- .../processor-requirements/directory.json | 210 +++++++++++++++++ testdata/processor-requirements/rolie.json | 210 +++++++++++++++++ ...256-sha512-forbid-hash-fetching-rolie.json | 214 ++++++++++++++++++ 6 files changed, 711 insertions(+), 44 deletions(-) create mode 100644 testdata/processor-requirements/directory.json create mode 100644 testdata/processor-requirements/rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index eed561a..b913864 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -20,7 +20,6 @@ import ( "fmt" "io" "log" - "log/slog" "net/http" "net/url" "path/filepath" @@ -586,14 +585,11 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { switch { case sha256 == "" && sha512 == "": - slog.Error("No hash listed on ROLIE feed", "file", url) - return + p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url) case sign == "": - slog.Error("No signature listed on ROLIE feed", "file", url) - return - default: - file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} + p.badROLIEFeed.error("No signature listed on ROLIE feed %s", url) } + file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} files = append(files, file) }) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index ea5aed5..5b0241e 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -29,6 +29,9 @@ func 
getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct if params.EnableSha512 { path += "sha512-" } + if params.ForbidHashFetching { + path += "forbid-hash-fetching-" + } if directoryProvider { path += "directory" } else { @@ -61,46 +64,74 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct func TestShaMarking(t *testing.T) { tests := []struct { - name string - directoryProvider bool - enableSha256 bool - enableSha512 bool + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + forbidHashFetching bool }{ { - name: "deliver sha256 and sha512", - directoryProvider: false, - enableSha256: true, - enableSha512: true, + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha256", - directoryProvider: false, - enableSha256: true, - enableSha512: false, + name: "enable sha256 and sha512, forbid fetching", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidHashFetching: true, }, { - name: "only deliver sha512", - directoryProvider: false, - enableSha256: false, - enableSha512: true, + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, + forbidHashFetching: false, }, { - name: "deliver sha256 and sha512, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: true, + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha256, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: false, + name: "deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha512, directory provider", - directoryProvider: true, - enableSha256: false, - 
enableSha512: true, + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, + forbidHashFetching: false, + }, + { + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, + forbidHashFetching: false, + }, + { + name: "no hash", + directoryProvider: false, + enableSha256: false, + enableSha512: false, + forbidHashFetching: false, + }, + { + name: "no hash, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: false, + forbidHashFetching: false, }, } @@ -111,9 +142,10 @@ func TestShaMarking(t *testing.T) { tt.Parallel() serverURL := "" params := testutil.ProviderParams{ - URL: "", - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidHashFetching: test.forbidHashFetching, } server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() @@ -141,9 +173,10 @@ func TestShaMarking(t *testing.T) { } expected := getRequirementTestData(t, testutil.ProviderParams{ - URL: serverURL, - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidHashFetching: test.forbidHashFetching, }, test.directoryProvider) for i, got := range report.Domains[0].Requirements { diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index 455d217..e933742 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,9 +18,10 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidHashFetching bool } // ProviderHandler returns a test provider handler with the specified configuration. 
@@ -49,6 +50,9 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): w.Header().Add("Content-Type", "application/json") + case (strings.HasSuffix(path, ".sha256") || strings.HasSuffix(path, ".sha512")) && params.ForbidHashFetching: + w.WriteHeader(http.StatusForbidden) + return case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: w.WriteHeader(http.StatusNotFound) return diff --git a/testdata/processor-requirements/directory.json b/testdata/processor-requirements/directory.json new file mode 100644 index 0000000..ed61fcc --- /dev/null +++ b/testdata/processor-requirements/directory.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." 
+ } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." 
+ } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" + }, + { + "type": 0, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] \ No newline at end of file diff --git a/testdata/processor-requirements/rolie.json b/testdata/processor-requirements/rolie.json new file mode 100644 index 0000000..cd65a7e --- /dev/null +++ b/testdata/processor-requirements/rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." 
+ } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "No hash listed on ROLIE feed {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." 
+ } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json new file mode 100644 index 0000000..03359f0 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json @@ -0,0 +1,214 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." 
+ } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." 
+ } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" + }, + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] From b1a76207636a7c312c94344b44546116f31c5641 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 12:23:10 +0100 Subject: [PATCH 083/117] Extend processor SHA fetching tests Allow to forbid individual hashes from downloading. This allows to for testing the behavior, if one of the hashes could not be downloaded. 
--- cmd/csaf_checker/processor_test.go | 119 +++++++++--------- internal/testutil/testutil.go | 14 ++- ...12-forbid-sha256-forbid-sha512-rolie.json} | 0 3 files changed, 68 insertions(+), 65 deletions(-) rename testdata/processor-requirements/{sha256-sha512-forbid-hash-fetching-rolie.json => sha256-sha512-forbid-sha256-forbid-sha512-rolie.json} (100%) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 5b0241e..9e3f112 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -29,8 +29,11 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct if params.EnableSha512 { path += "sha512-" } - if params.ForbidHashFetching { - path += "forbid-hash-fetching-" + if params.ForbidSha256 { + path += "forbid-sha256-" + } + if params.ForbidSha512 { + path += "forbid-sha512-" } if directoryProvider { path += "directory" @@ -64,74 +67,68 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct func TestShaMarking(t *testing.T) { tests := []struct { - name string - directoryProvider bool - enableSha256 bool - enableSha512 bool - forbidHashFetching bool + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + forbidSha256 bool + forbidSha512 bool }{ { - name: "deliver sha256 and sha512", - directoryProvider: false, - enableSha256: true, - enableSha512: true, - forbidHashFetching: false, + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, }, { - name: "enable sha256 and sha512, forbid fetching", - directoryProvider: false, - enableSha256: true, - enableSha512: true, - forbidHashFetching: true, + name: "enable sha256 and sha512, forbid fetching", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: true, + forbidSha512: true, }, { - name: "only deliver sha256", - directoryProvider: false, - enableSha256: true, - enableSha512: false, - 
forbidHashFetching: false, + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, }, { - name: "only deliver sha512", - directoryProvider: false, - enableSha256: false, - enableSha512: true, - forbidHashFetching: false, + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, }, { - name: "deliver sha256 and sha512, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: true, - forbidHashFetching: false, + name: "deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, }, { - name: "only deliver sha256, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: false, - forbidHashFetching: false, + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, }, { - name: "only deliver sha512, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: true, - forbidHashFetching: false, + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, }, { - name: "no hash", - directoryProvider: false, - enableSha256: false, - enableSha512: false, - forbidHashFetching: false, + name: "no hash", + directoryProvider: false, + enableSha256: false, + enableSha512: false, }, { - name: "no hash, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: false, - forbidHashFetching: false, + name: "no hash, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: false, }, } @@ -142,10 +139,11 @@ func TestShaMarking(t *testing.T) { tt.Parallel() serverURL := "" params := testutil.ProviderParams{ - URL: "", - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, - ForbidHashFetching: test.forbidHashFetching, + URL: "", + EnableSha256: 
test.enableSha256, + EnableSha512: test.enableSha512, + ForbidSha256: test.forbidSha256, + ForbidSha512: test.forbidSha512, } server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() @@ -173,10 +171,11 @@ func TestShaMarking(t *testing.T) { } expected := getRequirementTestData(t, testutil.ProviderParams{ - URL: serverURL, - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, - ForbidHashFetching: test.forbidHashFetching, + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidSha256: test.forbidSha256, + ForbidSha512: test.forbidSha512, }, test.directoryProvider) for i, got := range report.Domains[0].Requirements { diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index e933742..c7bad68 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,10 +18,11 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool - ForbidHashFetching bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidSha256 bool + ForbidSha512 bool } // ProviderHandler returns a test provider handler with the specified configuration. 
@@ -50,7 +51,10 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): w.Header().Add("Content-Type", "application/json") - case (strings.HasSuffix(path, ".sha256") || strings.HasSuffix(path, ".sha512")) && params.ForbidHashFetching: + case (strings.HasSuffix(path, ".sha256")) && params.ForbidSha256: + w.WriteHeader(http.StatusForbidden) + return + case strings.HasSuffix(path, ".sha512") && params.ForbidSha512: w.WriteHeader(http.StatusForbidden) return case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: diff --git a/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-forbid-sha512-rolie.json similarity index 100% rename from testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json rename to testdata/processor-requirements/sha256-sha512-forbid-sha256-forbid-sha512-rolie.json From d38150c6a0d334300dfb3391964ea051c66aa4ce Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 12:57:28 +0100 Subject: [PATCH 084/117] Add testdata for individual hash forbidden tests --- cmd/csaf_checker/processor_test.go | 16 ++ .../sha256-sha512-forbid-sha256-rolie.json | 210 ++++++++++++++++++ .../sha256-sha512-forbid-sha512-rolie.json | 210 ++++++++++++++++++ 3 files changed, 436 insertions(+) create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 9e3f112..0710f32 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -88,6 +88,22 @@ func TestShaMarking(t *testing.T) { forbidSha256: true, forbidSha512: true, }, + { + name: "enable sha256 and sha512, forbid sha256", + 
directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: true, + forbidSha512: false, + }, + { + name: "enable sha256 and sha512, forbid sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: false, + forbidSha512: true, + }, { name: "only deliver sha256", directoryProvider: false, diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json new file mode 100644 index 0000000..2a1f2a8 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json new file mode 100644 index 0000000..2a4c98f --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." 
+ } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] From bc5d149f74d2ce5e7ed03316141a31eafbd80ea1 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 19:28:24 +0100 Subject: [PATCH 085/117] Use exit code 1 for general errors, fix documentation --- cmd/csaf_validator/main.go | 2 +- docs/csaf_validator.md | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 1a34be0..346180b 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -23,7 +23,7 @@ import ( ) const ( - exitCodeSchemaInvalid = 1 << iota + exitCodeSchemaInvalid = 2 << iota exitCodeNoRemoteValidator exitCodeFailedRemoteValidation exitCodeAllValid = 0 diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 64ded6d..a0e00bb 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -5,9 +5,10 @@ is a tool to validate local advisories files against the JSON Schema and an opti ### Exit codes If no fatal error occurs the program will exit with an exit code `n` with the following conditions: - `n == 0`: all valid -- `(n / 2) % 1 == 1`: schema validation failed -- `(n / 4) % 1 == 1`: no remote validator configured -- `(n / 8) % 1 == 1`: failure in remote validation +- `(n & 1) > 0`: general error, see logs +- `(n & 2) > 0`: schema validation failed +- `(n & 4) > 0`: no remote validator configured +- `(n & 8) > 0`: failure in remote validation ### Usage From 95ff418a270d618ffc2b6fb661e702cf7639d75f Mon Sep 17 00:00:00 
2001 From: Bernhard Reiter Date: Wed, 18 Dec 2024 08:55:48 +0100 Subject: [PATCH 086/117] fix: Content-Type header for JSON responses * Remove `charset=utf-8` parameter, which is not allowed for JSON, according to rfc8259. --- cmd/csaf_provider/controller.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_provider/controller.go b/cmd/csaf_provider/controller.go index 7f64fe2..f04b7bd 100644 --- a/cmd/csaf_provider/controller.go +++ b/cmd/csaf_provider/controller.go @@ -174,7 +174,7 @@ func (c *controller) web( // writeJSON sets the header for the response and writes the JSON encoding of the given "content". // It logs out an error message in case of an error. func writeJSON(rw http.ResponseWriter, content any, code int) { - rw.Header().Set("Content-type", "application/json; charset=utf-8") + rw.Header().Set("Content-type", "application/json") rw.Header().Set("X-Content-Type-Options", "nosniff") rw.WriteHeader(code) if err := json.NewEncoder(rw).Encode(content); err != nil { From d8e903587a8744b51227da17867505da75a44c41 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 18 Dec 2024 15:37:58 +0100 Subject: [PATCH 087/117] Warn only if the other hash could be fetched --- cmd/csaf_checker/processor.go | 28 +++++++++++++------ .../sha256-sha512-forbid-sha256-rolie.json | 2 +- .../sha256-sha512-forbid-sha512-rolie.json | 2 +- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index b913864..224e225 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -757,6 +757,9 @@ func (p *processor) integrity( hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)}) } + couldFetchHash := false + hashFetchErrors := []string{} + for _, x := range hashes { hu, err := url.Parse(x.url()) if err != nil { @@ -768,19 +771,15 @@ func (p *processor) integrity( p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { - 
p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err) + hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: %v.", hashFile, err)) continue } if res.StatusCode != http.StatusOK { - if f.IsDirectory() { - p.badIntegrities.info("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) - } else { - p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) - } + hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status)) continue } + couldFetchHash = true h, err := func() ([]byte, error) { defer res.Body.Close() return util.HashFromReader(res.Body) @@ -798,6 +797,19 @@ func (p *processor) integrity( x.ext, u, hashFile) } } + + msgType := ErrorType + // Log only as warning, if the other hash could be fetched + if couldFetchHash { + msgType = WarnType + } + if f.IsDirectory() { + msgType = InfoType + } + for _, fetchError := range hashFetchErrors { + p.badIntegrities.add(msgType, fetchError) + } + // Check signature su, err := url.Parse(f.SignURL()) if err != nil { diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json index 2a1f2a8..72a173a 100644 --- a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json @@ -182,7 +182,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 1, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" } ] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json index 2a4c98f..1ab8f1e 100644 --- a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json +++ 
b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json @@ -182,7 +182,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 1, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" } ] From 8fc7f5bfad0c6022cbcc07cec36b875cb4ad292e Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 7 Jan 2025 12:23:40 +0100 Subject: [PATCH 088/117] Make documentation more explicit --- docs/csaf_validator.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index a0e00bb..87ec831 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -3,9 +3,11 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. ### Exit codes + If no fatal error occurs the program will exit with an exit code `n` with the following conditions: + - `n == 0`: all valid -- `(n & 1) > 0`: general error, see logs +- `(n & 1) > 0`: a general error occurred, all other flags are unset (see logs for more information) - `(n & 2) > 0`: schema validation failed - `(n & 4) > 0`: no remote validator configured - `(n & 8) > 0`: failure in remote validation From b8a5fa72d5d164b5996ec068de9c5e5e9bac15c5 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 8 Jan 2025 08:49:42 +0100 Subject: [PATCH 089/117] Fix nil check in downloader --- cmd/csaf_downloader/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index c8d92c1..ba6ccff 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -133,7 +133,7 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) // Overwrite for testing purposes - if client != nil { + if d.client != nil { client = *d.client } From 9275a37a9faa07943b326ebded09559ef36a1084 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 8 Jan 2025 08:50:30 
+0100 Subject: [PATCH 090/117] Format --- cmd/csaf_downloader/downloader.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index ba6ccff..88a63c2 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -514,7 +514,8 @@ nextAdvisory: url: file.SHA512URL(), warn: true, hashType: algSha512, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512))}) + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)), + }) } else { slog.Info("SHA512 not present") } @@ -523,7 +524,8 @@ nextAdvisory: url: file.SHA256URL(), warn: true, hashType: algSha256, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256))}) + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)), + }) } else { slog.Info("SHA256 not present") } From b6721e1d5ad3b2f4f4d6d37501a4b74cd665a2bd Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 10 Jan 2025 11:42:54 +0100 Subject: [PATCH 091/117] Add check for missing either sha256 or sha512 hashes only --- cmd/csaf_checker/processor.go | 4 ++++ testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 224e225..5c4f66e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -584,6 +584,10 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var file csaf.AdvisoryFile switch { + case sha256 == "" && sha512 != "": + p.badROLIEFeed.info("%s has no sha256 hash file listed", url) + case sha256 != "" && sha512 == "": + p.badROLIEFeed.info("%s has no sha512 hash file listed", url) case sha256 == "" && sha512 == "": p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url) case sign == "": diff --git 
a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 5875174..4ed47f1 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -153,7 +153,7 @@ "messages": [ { "type": 0, - "text": "All checked ROLIE feeds validated fine." + "text": "{{.URL}}/white/avendor-advisory-0004.json has no sha512 hash file listed" } ] }, diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 5875174..a2a195d 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -153,7 +153,7 @@ "messages": [ { "type": 0, - "text": "All checked ROLIE feeds validated fine." + "text": "{{.URL}}/white/avendor-advisory-0004.json has no sha256 hash file listed" } ] }, From 028f468d6f25f2e47d96fb1a5d924d3e22ab5949 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 10:32:13 +0100 Subject: [PATCH 092/117] Fix typo in error message Closes #608 --- cmd/csaf_checker/reporters.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 157eabe..9cd3fc8 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -178,7 +178,7 @@ func (r *tlpAmberRedReporter) report(p *processor, domain *Domain) { return } if len(p.badAmberRedPermissions) == 0 { - req.message(InfoType, "All tested advisories labeled TLP:WHITE or TLP:RED were access-protected.") + req.message(InfoType, "All tested advisories labeled TLP:AMBER or TLP:RED were access-protected.") return } req.Messages = p.badAmberRedPermissions From 59d2cef0826080f9bf7bd60332c15ec614153834 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 11:53:57 +0100 Subject: [PATCH 093/117] Fix typos --- cmd/csaf_validator/main.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 346180b..3250388 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -107,7 +107,7 @@ func run(opts *options, files []string) error { log.Printf("error: loading %q as JSON failed: %v\n", file, err) continue } - // Validate agsinst Schema. + // Validate against Schema. validationErrs, err := csaf.ValidateCSAF(doc) if err != nil { log.Printf("error: validating %q against schema failed: %v\n", @@ -124,7 +124,7 @@ func run(opts *options, files []string) error { fmt.Printf("%q passes the schema validation.\n", file) } - // Check filename agains ID + // Check filename against ID if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil { log.Printf("%s: %s.\n", file, err) continue From 6e8c2ecc059090865dd6bc48bc4ff0371757c8ee Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 12:22:11 +0100 Subject: [PATCH 094/117] Check remote validator even if file validation fails This makes it consistent with the handling of schema validation. --- cmd/csaf_validator/main.go | 1 - 1 file changed, 1 deletion(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 3250388..b3a0855 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -127,7 +127,6 @@ func run(opts *options, files []string) error { // Check filename against ID if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil { log.Printf("%s: %s.\n", file, err) - continue } // Validate against remote validator. From 84026b682d80e1edcc3ca8a8346c69a7e8e56059 Mon Sep 17 00:00:00 2001 From: "Bernhard E. 
Reiter" Date: Tue, 28 Jan 2025 17:41:54 +0100 Subject: [PATCH 095/117] Update README.md to exchange csaf.io until it is fixed --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 463b1d9..8f0c5f3 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ # csaf -Implements a [CSAF](https://csaf.io/) +Implements a [CSAF](https://oasis-open.github.io/csaf-documentation/) ([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. From 7d74543bbbf7cc3f5051f6fef3a84c97347d5eba Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:02:18 +0100 Subject: [PATCH 096/117] Fix: Now give errors if lookup methods fail, refactor accordingly --- cmd/csaf_checker/processor.go | 72 ++++++++++++++++------------------- 1 file changed, 33 insertions(+), 39 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 5d1b69b..e07f5ad 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1340,49 +1340,57 @@ func (p *processor) checkSecurityFolder(folder string) string { // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available // and serves the "provider-metadata.json". // It returns an empty string if all checks are passed, otherwise the errormessage. -func (p *processor) checkDNS(domain string) string { +func (p *processor) checkDNS(domain string) { + + p.badDNSPath.use() client := p.httpClient() path := "https://csaf.data.security." + domain res, err := client.Get(path) if err != nil { - return fmt.Sprintf("Fetching %s failed: %v", path, err) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Fetching %s failed: %v", path, err)) + return } if res.StatusCode != http.StatusOK { - return fmt.Sprintf("Fetching %s failed. 
Status code %d (%s)", - path, res.StatusCode, res.Status) - + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status)) } hash := sha256.New() defer res.Body.Close() content, err := io.ReadAll(res.Body) if err != nil { - return fmt.Sprintf("Error while reading the response from %s", path) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Error while reading the response from %s", path)) } hash.Write(content) if !bytes.Equal(hash.Sum(nil), p.pmd256) { - return fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", path) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", + path)) } - return "" } -// checkWellknownMetadataReporter checks if the provider-metadata.json file is +// checkWellknown checks if the provider-metadata.json file is // available under the /.well-known/csaf/ directory. Returns the errormessage if // an error was encountered, or an empty string otherwise -func (p *processor) checkWellknown(domain string) string { +func (p *processor) checkWellknown(domain string) { + + p.badWellknownMetadata.use() client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" res, err := client.Get(path) if err != nil { - return fmt.Sprintf("Fetching %s failed: %v", path, err) + p.badWellknownMetadata.add(ErrorType, + fmt.Sprintf("Fetching %s failed: %v", path, err)) } if res.StatusCode != http.StatusOK { - return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status) + p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status)) } - return "" } // checkWellknownSecurityDNS @@ -1401,50 +1409,36 @@ func (p *processor) checkWellknown(domain string) string { // In that case, errors are returned. 
func (p *processor) checkWellknownSecurityDNS(domain string) error { - warningsW := p.checkWellknown(domain) + p.checkWellknown(domain) + p.checkDNS(domain) + // Security check for well known (default) and legacy location - warningsS, sDMessage := p.checkSecurity(domain, false) + warnings, sDMessage := p.checkSecurity(domain, false) // if the security.txt under .well-known was not okay // check for a security.txt within its legacy location sLMessage := "" - if warningsS == 1 { - warningsS, sLMessage = p.checkSecurity(domain, true) + if warnings == 1 { + warnings, sLMessage = p.checkSecurity(domain, true) } - warningsD := p.checkDNS(domain) - p.badWellknownMetadata.use() p.badSecurity.use() - p.badDNSPath.use() - - var kind MessageType - if warningsS != 1 || warningsD == "" || warningsW == "" { - kind = WarnType - } else { - kind = ErrorType - } // Info, Warning or Error depending on kind and warningS - kindSD := kind - if warningsS == 0 { + kindSD := WarnType + if warnings == 0 { kindSD = InfoType } - kindSL := kind - if warningsS == 2 { + kindSL := ErrorType + if warnings == 2 { kindSL = InfoType } - if warningsW != "" { - p.badWellknownMetadata.add(kind, warningsW) - } p.badSecurity.add(kindSD, sDMessage) // only if the well-known security.txt was not successful: // report about the legacy location - if warningsS != 0 { + if warnings != 0 { p.badSecurity.add(kindSL, sLMessage) } - if warningsD != "" { - p.badDNSPath.add(kind, warningsD) - } return nil } From 02787b24b799113b769b9ce3bfaeeb66b435340e Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:26:59 +0100 Subject: [PATCH 097/117] Update comments, clean up security check --- cmd/csaf_checker/processor.go | 44 +++++++++++++++++------------------ 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e07f5ad..cb38bda 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1339,7 +1339,6 @@ 
func (p *processor) checkSecurityFolder(folder string) string { // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available // and serves the "provider-metadata.json". -// It returns an empty string if all checks are passed, otherwise the errormessage. func (p *processor) checkDNS(domain string) { p.badDNSPath.use() @@ -1373,8 +1372,7 @@ func (p *processor) checkDNS(domain string) { } // checkWellknown checks if the provider-metadata.json file is -// available under the /.well-known/csaf/ directory. Returns the errormessage if -// an error was encountered, or an empty string otherwise +// available under the /.well-known/csaf/ directory. func (p *processor) checkWellknown(domain string) { p.badWellknownMetadata.use() @@ -1402,15 +1400,13 @@ func (p *processor) checkWellknown(domain string) { // 4. Finally it checks if the "csaf.data.security.domain.tld" DNS record // is available and serves the "provider-metadata.json". // -// / -// If all three checks fail, errors are given, -// otherwise warnings for all failed checks. -// The function returns nil, unless errors outside the checks were found. -// In that case, errors are returned. +// For the security.txt checks, it first checks the default location. +// Should this lookup fail, a warning will be given and a lookup +// for the legacy location will be made. If this fails as well, then an +// error is given. 
func (p *processor) checkWellknownSecurityDNS(domain string) error { p.checkWellknown(domain) - p.checkDNS(domain) // Security check for well known (default) and legacy location warnings, sDMessage := p.checkSecurity(domain, false) @@ -1423,22 +1419,24 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badSecurity.use() - // Info, Warning or Error depending on kind and warningS - kindSD := WarnType - if warnings == 0 { - kindSD = InfoType - } - kindSL := ErrorType - if warnings == 2 { - kindSL = InfoType + // Report about Securitytxt: + // Only report about Legacy if default was succesful (0). + // Report default and legacy as errors if neither was succesful (1). + // Warn about missing security in the default position if not found + // but found in the legacy location, and inform about finding it there (2). + switch warnings { + case 0: + p.badSecurity.add(InfoType, sDMessage) + case 1: + p.badSecurity.add(ErrorType, sDMessage) + p.badSecurity.add(ErrorType, sLMessage) + case 2: + p.badSecurity.add(WarnType, sDMessage) + p.badSecurity.add(InfoType, sLMessage) } - p.badSecurity.add(kindSD, sDMessage) - // only if the well-known security.txt was not successful: - // report about the legacy location - if warnings != 0 { - p.badSecurity.add(kindSL, sLMessage) - } + p.checkDNS(domain) + return nil } From 82a6929e4dd9aea3743cb905e415665825f0dc89 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:41:16 +0100 Subject: [PATCH 098/117] Fix: Poor phrasing corrected --- cmd/csaf_checker/processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index cb38bda..d6f0f6b 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1420,7 +1420,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badSecurity.use() // Report about Securitytxt: - // Only report about Legacy if default was succesful (0). 
+ // Only report about default location if it was succesful (0). // Report default and legacy as errors if neither was succesful (1). // Warn about missing security in the default position if not found // but found in the legacy location, and inform about finding it there (2). From 6e02de974e537ace9cd08179225a715674f8f096 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 25 Feb 2025 15:03:38 +0100 Subject: [PATCH 099/117] update release workflow dependencies and so glibc * Update runner to ubuntu-22.04 which is the eldest to be supported by github from 2025-04-01. * Update github actions and go version needed. --- .github/workflows/release.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d1e370f..f77c9e3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,23 +7,23 @@ on: jobs: releases-matrix: name: Release Go binaries - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: '^1.21.0' + go-version: '^1.23.6' - name: Build run: make dist - name: Upload release assets - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: files: | dist/csaf-*.zip From a4a90f4f9274b295c27bfb6df255e6b2a5134f45 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 25 Feb 2025 15:07:34 +0100 Subject: [PATCH 100/117] update go version to 1.23 --- .github/workflows/itest.yml | 6 +++--- README.md | 2 +- docs/Development.md | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 9cc4c6b..b537b39 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -7,9 +7,9 @@ jobs: steps: - name: Set up Go - uses: actions/setup-go@v3 + uses: 
actions/setup-go@v5 with: - go-version: 1.22.0 + go-version: '^1.23.6' - name: Set up Node.js uses: actions/setup-node@v3 @@ -17,7 +17,7 @@ jobs: node-version: 16 - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Execute the scripts run: | diff --git a/README.md b/README.md index 8f0c5f3..b76bf95 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/docs/Development.md b/docs/Development.md index 5c4df22..bc71c2c 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.22 and 1.23). +the latest version of Go (currently 1.23 and 1.24). 
## Generated files From 3afa8d8b2e908cba70bddde5442240cab5ec9bb9 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 25 Feb 2025 15:24:24 +0100 Subject: [PATCH 101/117] Upgrade to artifact action v4 --- .github/workflows/itest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index b537b39..8bc87d5 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -36,7 +36,7 @@ jobs: shell: bash - name: Upload test results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: checker-results path: | From e91bdec201822e1e334582a5dde0388e92d74994 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 3 Mar 2025 17:31:21 +0100 Subject: [PATCH 102/117] Add example for iterating product id and product helper (#617) * Add example for iterating product id and product helper * simplify code a bit * Remove newline --------- Co-authored-by: Sascha L. Teichmann --- examples/product_lister/main.go | 141 ++++++++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100644 examples/product_lister/main.go diff --git a/examples/product_lister/main.go b/examples/product_lister/main.go new file mode 100644 index 0000000..5ad26a9 --- /dev/null +++ b/examples/product_lister/main.go @@ -0,0 +1,141 @@ +// Package main implements a simple demo program to +// work with the csaf library. 
+package main + +import ( + "encoding/json" + "flag" + "fmt" + "log" + "os" + + "github.com/gocsaf/csaf/v3/csaf" +) + +func main() { + flag.Usage = func() { + if _, err := fmt.Fprintf(flag.CommandLine.Output(), + "Usage:\n %s [OPTIONS] files...\n\nOptions:\n", os.Args[0]); err != nil { + log.Fatalf("error: %v\n", err) + } + flag.PrintDefaults() + } + printProductIdentHelper := flag.Bool("print_ident_helper", false, "print product helper mapping") + flag.Parse() + + files := flag.Args() + if len(files) == 0 { + log.Println("No files given.") + return + } + + var printer func(*csaf.Advisory) error + if *printProductIdentHelper { + printer = printProductIdentHelperMapping + } else { + printer = printProductIDMapping + } + + if err := run(files, printer); err != nil { + log.Fatalf("error: %v\n", err) + } +} + +// visitFullProductNames iterates all full product names in the advisory. +func visitFullProductNames( + adv *csaf.Advisory, + visit func(*csaf.FullProductName), +) { + // Iterate over all full product names + if fpns := adv.ProductTree.FullProductNames; fpns != nil { + for _, fpn := range *fpns { + if fpn != nil && fpn.ProductID != nil { + visit(fpn) + } + } + } + + // Iterate over branches recursively + var recBranch func(b *csaf.Branch) + recBranch = func(b *csaf.Branch) { + if b == nil { + return + } + if fpn := b.Product; fpn != nil && fpn.ProductID != nil { + visit(fpn) + + } + for _, c := range b.Branches { + recBranch(c) + } + } + for _, b := range adv.ProductTree.Branches { + recBranch(b) + } + + // Iterate over relationships + if rels := adv.ProductTree.RelationShips; rels != nil { + for _, rel := range *rels { + if rel != nil { + if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil { + visit(fpn) + } + } + } + } +} + +// run applies fn to all loaded advisories. 
+func run(files []string, fn func(*csaf.Advisory) error) error { + for _, file := range files { + adv, err := csaf.LoadAdvisory(file) + if err != nil { + return fmt.Errorf("loading %q failed: %w", file, err) + } + if err := fn(adv); err != nil { + return err + } + } + return nil +} + +// printJSON serializes v as indented JSON to stdout. +func printJSON(v any) error { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(v) +} + +// printProductIDMapping prints all product ids with their name and identification helper. +func printProductIDMapping(adv *csaf.Advisory) error { + type productNameHelperMapping struct { + FullProductName *csaf.FullProductName `json:"product"` + ProductIdentificationHelper *csaf.ProductIdentificationHelper `json:"product_identification_helper"` + } + + productIDMap := map[csaf.ProductID][]productNameHelperMapping{} + visitFullProductNames(adv, func(fpn *csaf.FullProductName) { + productIDMap[*fpn.ProductID] = append(productIDMap[*fpn.ProductID], productNameHelperMapping{ + FullProductName: fpn, + ProductIdentificationHelper: fpn.ProductIdentificationHelper, + }) + }) + return printJSON(productIDMap) +} + +// printProductIdentHelperMapping prints all product identifier helper with their product id. 
+func printProductIdentHelperMapping(adv *csaf.Advisory) error { + type productIdentIDMapping struct { + ProductNameHelperMapping csaf.ProductIdentificationHelper `json:"product_identification_helper"` + ProductID *csaf.ProductID `json:"product_id"` + } + + productIdentMap := []productIdentIDMapping{} + visitFullProductNames(adv, func(fpn *csaf.FullProductName) { + productIdentMap = append(productIdentMap, productIdentIDMapping{ + ProductNameHelperMapping: *fpn.ProductIdentificationHelper, + ProductID: fpn.ProductID, + }) + }) + return printJSON(productIdentMap) +} From 24f9af7f26bf558ec92dedc86317a1267b169896 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 5 Mar 2025 09:55:11 +0100 Subject: [PATCH 103/117] Add documentation for externally signed documents Closes #607 --- docs/csaf_uploader.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/csaf_uploader.md b/docs/csaf_uploader.md index 0e68aa9..76af99f 100644 --- a/docs/csaf_uploader.md +++ b/docs/csaf_uploader.md @@ -43,6 +43,12 @@ E.g. uploading a csaf-document which asks to enter a password interactively. 
+To upload an already signed document, use the `-x` option +```bash +# Note: The file CSAF-document-1.json.asc must exist +./csaf_uploader -x -a upload -I -t white -u https://localhost/cgi-bin/csaf_provider.go CSAF-document-1.json +``` + By default csaf_uploader will try to load a config file from the following places: From ec0c3f9c2ca9a9080f876944ddac5f0a583b5b11 Mon Sep 17 00:00:00 2001 From: Marcus Perlick <38723273+marcusperlick@users.noreply.github.com> Date: Mon, 10 Mar 2025 09:24:49 +0100 Subject: [PATCH 104/117] Fix potential leak of HTTP response body in downloadJSON of csaf_aggregator (#618) --- cmd/csaf_aggregator/client.go | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index 916baa5..abd475c 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -10,6 +10,7 @@ package main import ( "errors" + "fmt" "io" "net/http" @@ -20,13 +21,14 @@ var errNotFound = errors.New("not found") func downloadJSON(c util.Client, url string, found func(io.Reader) error) error { res, err := c.Get(url) - if err != nil || res.StatusCode != http.StatusOK || + if err != nil { + return fmt.Errorf("not found: %w", err) + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK || res.Header.Get("Content-Type") != "application/json" { // ignore this as it is expected. 
return errNotFound } - return func() error { - defer res.Body.Close() - return found(res.Body) - }() + return found(res.Body) } From 3cfafa8263112d79d489dbc170004fcf3498340b Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 10 Mar 2025 11:11:34 +0100 Subject: [PATCH 105/117] Report error in checker if content type is not correct Related: #606 --- cmd/csaf_checker/processor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index c0aafb2..397c88e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -678,9 +678,9 @@ func (p *processor) integrity( continue } - // Warn if we do not get JSON. + // Error if we do not get JSON. if ct := res.Header.Get("Content-Type"); ct != "application/json" { - lg(WarnType, + lg(ErrorType, "The content type of %s should be 'application/json' but is '%s'", u, ct) } From 534d6f049f9ed5cf54c75c8a2ede3a23511868f4 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 10 Mar 2025 12:02:44 +0100 Subject: [PATCH 106/117] Add content-type error report test --- cmd/csaf_checker/processor_test.go | 53 ++++++++++++++++++++++++++++++ internal/testutil/testutil.go | 18 ++++++---- 2 files changed, 65 insertions(+), 6 deletions(-) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 0710f32..4d13908 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -14,6 +14,8 @@ import ( "net/http/httptest" "os" "reflect" + "slices" + "strings" "testing" "text/template" @@ -65,6 +67,57 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct return requirement } +func TestContentTypeReport(t *testing.T) { + serverURL := "" + params := testutil.ProviderParams{ + URL: "", + EnableSha256: true, + EnableSha512: true, + ForbidSha256: true, + ForbidSha512: true, + JSONContentType: "application/json; charset=utf-8", + } + server := 
httptest.NewTLSServer(testutil.ProviderHandler(&params, false)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + cfg := config{} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + p, err := newProcessor(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + p.client = client + + report, err := p.run([]string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("Content-Type-Report: Expected no error, got: %v", err) + } + + got := report.Domains[0].Requirements + idx := slices.IndexFunc(got, func(e *Requirement) bool { + return e.Num == 7 + }) + if idx == -1 { + t.Error("Content-Type-Report: Could not find requirement") + } else { + message := got[idx].Messages[0] + if message.Type != ErrorType || !strings.Contains(message.Text, "should be 'application/json'") { + t.Errorf("Content-Type-Report: Content Type Error, got %v", message) + } + } + + p.close() +} + func TestShaMarking(t *testing.T) { tests := []struct { name string diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index c7bad68..a8186a4 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,11 +18,12 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool - ForbidSha256 bool - ForbidSha512 bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidSha256 bool + ForbidSha512 bool + JSONContentType string } // ProviderHandler returns a test provider handler with the specified configuration. 
@@ -35,6 +36,11 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle path += "simple-rolie-provider" } + jsonContenType := "application/json" + if params.JSONContentType != "" { + jsonContenType = params.JSONContentType + } + path += r.URL.Path if strings.HasSuffix(r.URL.Path, "/") { @@ -50,7 +56,7 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle case strings.HasSuffix(path, ".html"): w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): - w.Header().Add("Content-Type", "application/json") + w.Header().Add("Content-Type", jsonContenType) case (strings.HasSuffix(path, ".sha256")) && params.ForbidSha256: w.WriteHeader(http.StatusForbidden) return From 4429dd69857d59fe0ef2c6ca5a6974ac76062e50 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 13 Mar 2025 18:23:28 +0100 Subject: [PATCH 107/117] feat: add access-control-allow-origin header .. for better access from web applications. improve #479 --- docs/scripts/DNSConfigForItest.sh | 2 ++ docs/scripts/setupProviderForITest.sh | 11 +++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/scripts/DNSConfigForItest.sh b/docs/scripts/DNSConfigForItest.sh index f7b85f0..9196af3 100755 --- a/docs/scripts/DNSConfigForItest.sh +++ b/docs/scripts/DNSConfigForItest.sh @@ -28,6 +28,8 @@ echo " location = / { try_files /.well-known/csaf/provider-metadata.json =404; + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; } access_log /var/log/nginx/dns-domain_access.log; diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index f9d7d18..2b6e6d1 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -56,11 +56,14 @@ location /cgi-bin/ { sudo sed -i "/^server {/a\ include fcgiwrap.conf;" $NGINX_CONFIG_PATH echo " - # For atomic directory switches - disable_symlinks off; + # For 
atomic directory switches + disable_symlinks off; - # directory listings - autoindex on; + # directory listings + autoindex on; + + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; " > locationConfig.txt sudo sed -i "/^\s*location \/ {/r locationConfig.txt" $NGINX_CONFIG_PATH # Insert config inside location{} ./DNSConfigForItest.sh From 527fe71992797095f99e95c02f69711dc629e03d Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 13 Mar 2025 18:30:38 +0100 Subject: [PATCH 108/117] feat: set acao header * adapt provider-setup.md to changes for the acao header. --- docs/provider-setup.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 48c29d0..2fdf1e3 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -78,6 +78,9 @@ server { # directory listings autoindex on; + + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; } # enable CGI @@ -155,7 +158,7 @@ Again replacing `{clientCert.crt}` and `{clientKey.pem}` accordingly. 
To let nginx resolves the DNS record `csaf.data.security.domain.tld` to fulfill the [Requirement 10](https://docs.oasis-open.org/csaf/csaf/v2.0/cs01/csaf-v2.0-cs01.html#7110-requirement-10-dns-path) configure a new server block (virtual host) in a separated file under `/etc/nginx/available-sites/{DNSNAME}` like following: - + ```sh server { From 8163f578511f417a0c1b9b4b58de8574b7916736 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Fri, 14 Mar 2025 10:05:56 +0100 Subject: [PATCH 109/117] Compare changes dates (#609) * Feat: Compare dates in changes.csv to those within the files if existent * Fix: remove debug output and fix typo * Make map handling consistent * Improve: refactor time extraction * fix: some syntax fixes * Small nits * Fix: Check changes before stopping the scan of already tested advisories * Revert "Fix: Check changes before stopping the scan of already tested advisories - bad way to solve the problem, can cause problems" This reverts commit d38dc285cc8e664dc97f81418b2b52174e83e68b. * fix: delay checking of changes dates so it is not skipped most of the time * Fix time comparison --------- Co-authored-by: koplas Co-authored-by: Sascha L. 
Teichmann --- cmd/csaf_checker/processor.go | 85 ++++++++++++++++++++++++++--------- 1 file changed, 65 insertions(+), 20 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 397c88e..ae79133 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -53,6 +53,8 @@ type processor struct { pmd any keys *crypto.KeyRing labelChecker labelChecker + timesChanges map[string]time.Time + timesAdv map[string]time.Time invalidAdvisories topicMessages badFilenames topicMessages @@ -188,6 +190,9 @@ func newProcessor(cfg *config) (*processor, error) { advisories: map[csaf.TLPLabel]util.Set[string]{}, whiteAdvisories: map[identifier]bool{}, }, + timesAdv: map[string]time.Time{}, + timesChanges: map[string]time.Time{}, + noneTLS: util.Set[string]{}, }, nil } @@ -202,14 +207,14 @@ func (p *processor) close() { // reset clears the fields values of the given processor. func (p *processor) reset() { p.redirects = nil - p.noneTLS = nil - for k := range p.alreadyChecked { - delete(p.alreadyChecked, k) - } p.pmdURL = "" p.pmd256 = nil p.pmd = nil p.keys = nil + clear(p.alreadyChecked) + clear(p.noneTLS) + clear(p.timesAdv) + clear(p.timesChanges) p.invalidAdvisories.reset() p.badFilenames.reset() @@ -371,9 +376,6 @@ func (p *processor) checkDomain(domain string) error { // checkTLS parses the given URL to check its schema, as a result it sets // the value of "noneTLS" field if it is not HTTPS. func (p *processor) checkTLS(u string) { - if p.noneTLS == nil { - p.noneTLS = util.Set[string]{} - } if x, err := url.Parse(u); err == nil && x.Scheme != "https" { p.noneTLS.Add(u) } @@ -617,6 +619,8 @@ func makeAbsolute(base *url.URL) func(*url.URL) *url.URL { var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`) +// integrity checks several csaf.AdvisoryFiles for formal +// mistakes, from conforming filenames to invalid advisories. 
func (p *processor) integrity( files []csaf.AdvisoryFile, base string, @@ -732,19 +736,19 @@ func (p *processor) integrity( // Check if file is in the right folder. p.badFolders.use() - if date, err := p.expr.Eval( - `$.document.tracking.initial_release_date`, doc); err != nil { - p.badFolders.error( - "Extracting 'initial_release_date' from %s failed: %v", u, err) - } else if text, ok := date.(string); !ok { - p.badFolders.error("'initial_release_date' is not a string in %s", u) - } else if d, err := time.Parse(time.RFC3339, text); err != nil { - p.badFolders.error( - "Parsing 'initial_release_date' as RFC3339 failed in %s: %v", u, err) - } else if folderYear == nil { + switch date, fault := p.extractTime(doc, `initial_release_date`, u); { + case fault != "": + p.badFolders.error(fault) + case folderYear == nil: p.badFolders.error("No year folder found in %s", u) - } else if d.UTC().Year() != *folderYear { - p.badFolders.error("%s should be in folder %d", u, d.UTC().Year()) + case date.UTC().Year() != *folderYear: + p.badFolders.error("%s should be in folder %d", u, date.UTC().Year()) + } + current, fault := p.extractTime(doc, `current_release_date`, u) + if fault != "" { + p.badChanges.error(fault) + } else { + p.timesAdv[f.URL()] = current } // Check hashes @@ -861,9 +865,48 @@ func (p *processor) integrity( } } + // If we tested an existing changes.csv + if len(p.timesAdv) > 0 && p.badChanges.used() { + // Iterate over all files again + for _, f := range files { + // If there was no previous error when extracting times from advisories and we have a valid time + if timeAdv, ok := p.timesAdv[f.URL()]; ok { + // If there was no previous error when extracting times from changes and the file was listed in changes.csv + if timeCha, ok := p.timesChanges[f.URL()]; ok { + // check if the time matches + if !timeAdv.Equal(timeCha) { + // if not, give an error and remove the pair so it isn't reported multiple times should integrity be called again + 
p.badChanges.error("Current release date in changes.csv and %s is not identical.", f.URL()) + delete(p.timesAdv, f.URL()) + delete(p.timesChanges, f.URL()) + } + } + } + } + } + return nil } +// extractTime extracts a time.Time value from a json document and returns it and an empty string or zero time alongside +// a string representing the error message that prevented obtaining the proper time value. +func (p *processor) extractTime(doc any, value string, u any) (time.Time, string) { + filter := "$.document.tracking." + value + date, err := p.expr.Eval(filter, doc) + if err != nil { + return time.Time{}, fmt.Sprintf("Extracting '%s' from %s failed: %v", value, u, err) + } + text, ok := date.(string) + if !ok { + return time.Time{}, fmt.Sprintf("'%s' is not a string in %s", value, u) + } + d, err := time.Parse(time.RFC3339, text) + if err != nil { + return time.Time{}, fmt.Sprintf("Parsing '%s' as RFC3339 failed in %s: %v", value, u, err) + } + return d, "" +} + // checkIndex fetches the "index.txt" and calls "checkTLS" method for HTTPS checks. // It extracts the file names from the file and passes them to "integrity" function. // It returns error if fetching/reading the file(s) fails, otherwise nil. 
@@ -991,8 +1034,10 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = append(times, t), + times, files = + append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) + p.timesChanges[path] = t } return times, files, nil }() From 17f6a3ac7eb7fac39825fb1ae8c25398d288fedc Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 14 Mar 2025 10:26:19 +0100 Subject: [PATCH 110/117] Fix inconsistent format --- docs/scripts/setupProviderForITest.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index 2b6e6d1..ae6c6fc 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -56,14 +56,14 @@ location /cgi-bin/ { sudo sed -i "/^server {/a\ include fcgiwrap.conf;" $NGINX_CONFIG_PATH echo " - # For atomic directory switches - disable_symlinks off; + # For atomic directory switches + disable_symlinks off; - # directory listings - autoindex on; + # directory listings + autoindex on; - # allow others web applications to get the static information - add_header Access-Control-Allow-Origin "*"; + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; " > locationConfig.txt sudo sed -i "/^\s*location \/ {/r locationConfig.txt" $NGINX_CONFIG_PATH # Insert config inside location{} ./DNSConfigForItest.sh From a7821265ca4dfc65ec3966d970047c322900e188 Mon Sep 17 00:00:00 2001 From: "Sascha L. 
Teichmann" Date: Mon, 17 Mar 2025 08:57:05 +0100 Subject: [PATCH 111/117] Move advisory downloading to download context method --- cmd/csaf_downloader/downloader.go | 616 ++++++++++++++++-------------- 1 file changed, 319 insertions(+), 297 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3270a88..5af7f5e 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -417,6 +417,320 @@ func (d *downloader) logValidationIssues(url string, errors []string, err error) } } +// downloadContext stores the common context of a downloader. +type downloadContext struct { + d *downloader + client util.Client + data bytes.Buffer + lastDir string + initialReleaseDate time.Time + dateExtract func(any) error + lower string + stats stats + expr *util.PathEval +} + +func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext { + dc := &downloadContext{ + client: d.httpClient(), + lower: strings.ToLower(string(label)), + expr: util.NewPathEval(), + } + dc.dateExtract = util.TimeMatcher(&dc.initialReleaseDate, time.RFC3339) + return dc +} + +func (dc *downloadContext) downloadAdvisory( + file csaf.AdvisoryFile, + errorCh chan<- error, +) error { + u, err := url.Parse(file.URL()) + if err != nil { + dc.stats.downloadFailed++ + slog.Warn("Ignoring invalid URL", + "url", file.URL(), + "error", err) + return nil + } + + if dc.d.cfg.ignoreURL(file.URL()) { + slog.Debug("Ignoring URL", "url", file.URL()) + return nil + } + + // Ignore not conforming filenames. 
+ filename := filepath.Base(u.Path) + if !util.ConformingFileName(filename) { + dc.stats.filenameFailed++ + slog.Warn("Ignoring none conforming filename", + "filename", filename) + return nil + } + + resp, err := dc.client.Get(file.URL()) + if err != nil { + dc.stats.downloadFailed++ + slog.Warn("Cannot GET", + "url", file.URL(), + "error", err) + return nil + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + dc.stats.downloadFailed++ + slog.Warn("Cannot load", + "url", file.URL(), + "status", resp.Status, + "status_code", resp.StatusCode) + return nil + } + + // Warn if we do not get JSON. + if ct := resp.Header.Get("Content-Type"); ct != "application/json" { + slog.Warn("Content type is not 'application/json'", + "url", file.URL(), + "content_type", ct) + } + + var ( + writers []io.Writer + s256, s512 hash.Hash + s256Data, s512Data []byte + remoteSHA256, remoteSHA512 []byte + signData []byte + ) + + hashToFetch := []hashFetchInfo{} + if file.SHA512URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA512URL(), + warn: true, + hashType: algSha512, + preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha512)), + }) + } else { + slog.Info("SHA512 not present") + } + if file.SHA256URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA256URL(), + warn: true, + hashType: algSha256, + preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha256)), + }) + } else { + slog.Info("SHA256 not present") + } + if file.IsDirectory() { + for i := range hashToFetch { + hashToFetch[i].warn = false + } + } + + remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(dc.client, hashToFetch) + if remoteSHA512 != nil { + s512 = sha512.New() + writers = append(writers, s512) + } + if remoteSHA256 != nil { + s256 = sha256.New() + writers = append(writers, s256) + } + + // Remember the data as we need to store it to file later. 
+ dc.data.Reset() + writers = append(writers, &dc.data) + + // Download the advisory and hash it. + hasher := io.MultiWriter(writers...) + + var doc any + + tee := io.TeeReader(resp.Body, hasher) + + if err := json.NewDecoder(tee).Decode(&doc); err != nil { + dc.stats.downloadFailed++ + slog.Warn("Downloading failed", + "url", file.URL(), + "error", err) + return nil + } + + // Compare the checksums. + s256Check := func() error { + if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) { + dc.stats.sha256Failed++ + return fmt.Errorf("SHA256 checksum of %s does not match", file.URL()) + } + return nil + } + + s512Check := func() error { + if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) { + dc.stats.sha512Failed++ + return fmt.Errorf("SHA512 checksum of %s does not match", file.URL()) + } + return nil + } + + // Validate OpenPGP signature. + keysCheck := func() error { + // Only check signature if we have loaded keys. + if dc.d.keys == nil { + return nil + } + var sign *crypto.PGPSignature + sign, signData, err = loadSignature(dc.client, file.SignURL()) + if err != nil { + slog.Warn("Downloading signature failed", + "url", file.SignURL(), + "error", err) + } + if sign != nil { + if err := dc.d.checkSignature(dc.data.Bytes(), sign); err != nil { + if !dc.d.cfg.IgnoreSignatureCheck { + dc.stats.signatureFailed++ + return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err) + } + } + } + return nil + } + + // Validate against CSAF schema. + schemaCheck := func() error { + if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 { + dc.stats.schemaFailed++ + dc.d.logValidationIssues(file.URL(), errors, err) + return fmt.Errorf("schema validation for %q failed", file.URL()) + } + return nil + } + + // Validate if filename is conforming. 
+ filenameCheck := func() error { + if err := util.IDMatchesFilename(dc.expr, doc, filename); err != nil { + dc.stats.filenameFailed++ + return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) + } + return nil + } + + // Validate against remote validator. + remoteValidatorCheck := func() error { + if dc.d.validator == nil { + return nil + } + rvr, err := dc.d.validator.Validate(doc) + if err != nil { + errorCh <- fmt.Errorf( + "calling remote validator on %q failed: %w", + file.URL(), err) + return nil + } + if !rvr.Valid { + dc.stats.remoteFailed++ + return fmt.Errorf("remote validation of %q failed", file.URL()) + } + return nil + } + + // Run all the validations. + valStatus := notValidatedValidationStatus + for _, check := range []func() error{ + s256Check, + s512Check, + keysCheck, + schemaCheck, + filenameCheck, + remoteValidatorCheck, + } { + if err := check(); err != nil { + slog.Error("Validation check failed", "error", err) + valStatus.update(invalidValidationStatus) + if dc.d.cfg.ValidationMode == validationStrict { + return nil + } + } + } + valStatus.update(validValidationStatus) + + // Send to forwarder + if dc.d.forwarder != nil { + dc.d.forwarder.forward( + filename, dc.data.String(), + valStatus, + string(s256Data), + string(s512Data)) + } + + if dc.d.cfg.NoStore { + // Do not write locally. + if valStatus == validValidationStatus { + dc.stats.succeeded++ + } + return nil + } + + if err := dc.expr.Extract( + `$.document.tracking.initial_release_date`, dc.dateExtract, false, doc, + ); err != nil { + slog.Warn("Cannot extract initial_release_date from advisory", + "url", file.URL()) + dc.initialReleaseDate = time.Now() + } + dc.initialReleaseDate = dc.initialReleaseDate.UTC() + + // Advisories that failed validation are stored in a special folder. 
+ var newDir string + if valStatus != validValidationStatus { + newDir = path.Join(dc.d.cfg.Directory, failedValidationDir) + } else { + newDir = dc.d.cfg.Directory + } + + // Do we have a configured destination folder? + if dc.d.cfg.Folder != "" { + newDir = path.Join(newDir, dc.d.cfg.Folder) + } else { + newDir = path.Join(newDir, dc.lower, strconv.Itoa(dc.initialReleaseDate.Year())) + } + + if newDir != dc.lastDir { + if err := dc.d.mkdirAll(newDir, 0755); err != nil { + errorCh <- err + return nil + } + dc.lastDir = newDir + } + + // Write advisory to file + path := filepath.Join(dc.lastDir, filename) + + // Write data to disk. + for _, x := range []struct { + p string + d []byte + }{ + {path, dc.data.Bytes()}, + {path + ".sha256", s256Data}, + {path + ".sha512", s512Data}, + {path + ".asc", signData}, + } { + if x.d != nil { + if err := os.WriteFile(x.p, x.d, 0644); err != nil { + errorCh <- err + return nil + } + } + } + + dc.stats.succeeded++ + slog.Info("Written advisory", "path", path) + return nil +} + func (d *downloader) downloadWorker( ctx context.Context, wg *sync.WaitGroup, @@ -426,21 +740,11 @@ func (d *downloader) downloadWorker( ) { defer wg.Done() - var ( - client = d.httpClient() - data bytes.Buffer - lastDir string - initialReleaseDate time.Time - dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339) - lower = strings.ToLower(string(label)) - stats = stats{} - expr = util.NewPathEval() - ) + dc := newDownloadContext(d, label) // Add collected stats back to total. 
- defer d.addStats(&stats) + defer d.addStats(&dc.stats) -nextAdvisory: for { var file csaf.AdvisoryFile var ok bool @@ -452,292 +756,10 @@ nextAdvisory: case <-ctx.Done(): return } - - u, err := url.Parse(file.URL()) - if err != nil { - stats.downloadFailed++ - slog.Warn("Ignoring invalid URL", - "url", file.URL(), - "error", err) - continue + if err := dc.downloadAdvisory(file, errorCh); err != nil { + slog.Error("download terminated", "error", err) + return } - - if d.cfg.ignoreURL(file.URL()) { - slog.Debug("Ignoring URL", "url", file.URL()) - continue - } - - // Ignore not conforming filenames. - filename := filepath.Base(u.Path) - if !util.ConformingFileName(filename) { - stats.filenameFailed++ - slog.Warn("Ignoring none conforming filename", - "filename", filename) - continue - } - - resp, err := client.Get(file.URL()) - if err != nil { - stats.downloadFailed++ - slog.Warn("Cannot GET", - "url", file.URL(), - "error", err) - continue - } - - if resp.StatusCode != http.StatusOK { - stats.downloadFailed++ - slog.Warn("Cannot load", - "url", file.URL(), - "status", resp.Status, - "status_code", resp.StatusCode) - continue - } - - // Warn if we do not get JSON. 
- if ct := resp.Header.Get("Content-Type"); ct != "application/json" { - slog.Warn("Content type is not 'application/json'", - "url", file.URL(), - "content_type", ct) - } - - var ( - writers []io.Writer - s256, s512 hash.Hash - s256Data, s512Data []byte - remoteSHA256, remoteSHA512 []byte - signData []byte - ) - - hashToFetch := []hashFetchInfo{} - if file.SHA512URL() != "" { - hashToFetch = append(hashToFetch, hashFetchInfo{ - url: file.SHA512URL(), - warn: true, - hashType: algSha512, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)), - }) - } else { - slog.Info("SHA512 not present") - } - if file.SHA256URL() != "" { - hashToFetch = append(hashToFetch, hashFetchInfo{ - url: file.SHA256URL(), - warn: true, - hashType: algSha256, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)), - }) - } else { - slog.Info("SHA256 not present") - } - if file.IsDirectory() { - for i := range hashToFetch { - hashToFetch[i].warn = false - } - } - - remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch) - if remoteSHA512 != nil { - s512 = sha512.New() - writers = append(writers, s512) - } - if remoteSHA256 != nil { - s256 = sha256.New() - writers = append(writers, s256) - } - - // Remember the data as we need to store it to file later. - data.Reset() - writers = append(writers, &data) - - // Download the advisory and hash it. - hasher := io.MultiWriter(writers...) - - var doc any - - if err := func() error { - defer resp.Body.Close() - tee := io.TeeReader(resp.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) - }(); err != nil { - stats.downloadFailed++ - slog.Warn("Downloading failed", - "url", file.URL(), - "error", err) - continue - } - - // Compare the checksums. 
- s256Check := func() error { - if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) { - stats.sha256Failed++ - return fmt.Errorf("SHA256 checksum of %s does not match", file.URL()) - } - return nil - } - - s512Check := func() error { - if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) { - stats.sha512Failed++ - return fmt.Errorf("SHA512 checksum of %s does not match", file.URL()) - } - return nil - } - - // Validate OpenPGP signature. - keysCheck := func() error { - // Only check signature if we have loaded keys. - if d.keys == nil { - return nil - } - var sign *crypto.PGPSignature - sign, signData, err = loadSignature(client, file.SignURL()) - if err != nil { - slog.Warn("Downloading signature failed", - "url", file.SignURL(), - "error", err) - } - if sign != nil { - if err := d.checkSignature(data.Bytes(), sign); err != nil { - if !d.cfg.IgnoreSignatureCheck { - stats.signatureFailed++ - return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err) - } - } - } - return nil - } - - // Validate against CSAF schema. - schemaCheck := func() error { - if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 { - stats.schemaFailed++ - d.logValidationIssues(file.URL(), errors, err) - return fmt.Errorf("schema validation for %q failed", file.URL()) - } - return nil - } - - // Validate if filename is conforming. - filenameCheck := func() error { - if err := util.IDMatchesFilename(expr, doc, filename); err != nil { - stats.filenameFailed++ - return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) - } - return nil - } - - // Validate against remote validator. 
- remoteValidatorCheck := func() error { - if d.validator == nil { - return nil - } - rvr, err := d.validator.Validate(doc) - if err != nil { - errorCh <- fmt.Errorf( - "calling remote validator on %q failed: %w", - file.URL(), err) - return nil - } - if !rvr.Valid { - stats.remoteFailed++ - return fmt.Errorf("remote validation of %q failed", file.URL()) - } - return nil - } - - // Run all the validations. - valStatus := notValidatedValidationStatus - for _, check := range []func() error{ - s256Check, - s512Check, - keysCheck, - schemaCheck, - filenameCheck, - remoteValidatorCheck, - } { - if err := check(); err != nil { - slog.Error("Validation check failed", "error", err) - valStatus.update(invalidValidationStatus) - if d.cfg.ValidationMode == validationStrict { - continue nextAdvisory - } - } - } - valStatus.update(validValidationStatus) - - // Send to forwarder - if d.forwarder != nil { - d.forwarder.forward( - filename, data.String(), - valStatus, - string(s256Data), - string(s512Data)) - } - - if d.cfg.NoStore { - // Do not write locally. - if valStatus == validValidationStatus { - stats.succeeded++ - } - continue - } - - if err := expr.Extract( - `$.document.tracking.initial_release_date`, dateExtract, false, doc, - ); err != nil { - slog.Warn("Cannot extract initial_release_date from advisory", - "url", file.URL()) - initialReleaseDate = time.Now() - } - initialReleaseDate = initialReleaseDate.UTC() - - // Advisories that failed validation are stored in a special folder. - var newDir string - if valStatus != validValidationStatus { - newDir = path.Join(d.cfg.Directory, failedValidationDir) - } else { - newDir = d.cfg.Directory - } - - // Do we have a configured destination folder? 
- if d.cfg.Folder != "" { - newDir = path.Join(newDir, d.cfg.Folder) - } else { - newDir = path.Join(newDir, lower, strconv.Itoa(initialReleaseDate.Year())) - } - - if newDir != lastDir { - if err := d.mkdirAll(newDir, 0755); err != nil { - errorCh <- err - continue - } - lastDir = newDir - } - - // Write advisory to file - path := filepath.Join(lastDir, filename) - - // Write data to disk. - for _, x := range []struct { - p string - d []byte - }{ - {path, data.Bytes()}, - {path + ".sha256", s256Data}, - {path + ".sha512", s512Data}, - {path + ".asc", signData}, - } { - if x.d != nil { - if err := os.WriteFile(x.p, x.d, 0644); err != nil { - errorCh <- err - continue nextAdvisory - } - } - } - - stats.succeeded++ - slog.Info("Written advisory", "path", path) } } From 5437d8127a8245ea5da2d7162c63c844e16156e9 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 17 Mar 2025 09:10:03 +0100 Subject: [PATCH 112/117] Store downloader in context --- cmd/csaf_downloader/downloader.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 5af7f5e..f0778ee 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -432,6 +432,7 @@ type downloadContext struct { func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext { dc := &downloadContext{ + d: d, client: d.httpClient(), lower: strings.ToLower(string(label)), expr: util.NewPathEval(), From 5709b14650682d1d9e5614ba586d3dc96a0aa27a Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 19 Mar 2025 09:04:19 +0100 Subject: [PATCH 113/117] Extend structured logging usage in aggregator (#622) * Extend structured logging usage in aggregator * Use structured logging in advisories processor * Remove unnecessary inner function * Format * Feat: Add verbose flag to example aggregator toml (in comment) --------- Co-authored-by: JanHoefelmeyer --- cmd/csaf_aggregator/config.go | 15 ++++++++---- 
cmd/csaf_aggregator/mirror.go | 7 +++--- csaf/advisories.go | 43 +++++++++++++++++------------------ docs/examples/aggregator.toml | 1 + 4 files changed, 36 insertions(+), 30 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 3c2c46b..55a7193 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -264,8 +264,14 @@ func (c *config) privateOpenPGPKey() (*crypto.Key, error) { return c.key, c.keyErr } -func (c *config) httpClient(p *provider) util.Client { +// httpLog does structured logging in a [util.LoggingClient]. +func httpLog(method, url string) { + slog.Debug("http", + "method", method, + "url", url) +} +func (c *config) httpClient(p *provider) util.Client { hClient := http.Client{} var tlsConfig tls.Config @@ -310,7 +316,10 @@ func (c *config) httpClient(p *provider) util.Client { } if c.Verbose { - client = &util.LoggingClient{Client: client} + client = &util.LoggingClient{ + Client: client, + Log: httpLog, + } } if p.Rate == nil && c.Rate == nil { @@ -331,7 +340,6 @@ func (c *config) httpClient(p *provider) util.Client { } func (c *config) checkProviders() error { - if !c.AllowSingleProvider && len(c.Providers) < 2 { return errors.New("need at least two providers") } @@ -471,7 +479,6 @@ func (c *config) prepareCertificates() error { // prepare prepares internal state of a loaded configuration. func (c *config) prepare() error { - if len(c.Providers) == 0 { return errors.New("no providers given in configuration") } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index c90ef68..e7c5154 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -462,8 +462,9 @@ func (w *worker) extractCategories(label string, advisory any) error { expr := cat[len(exprPrefix):] // Compile first to check that the expression is okay. 
if _, err := w.expr.Compile(expr); err != nil { - fmt.Printf("Compiling category expression %q failed: %v\n", - expr, err) + slog.Error("Compiling category expression failed", + "expr", expr, + "err", err) continue } // Ignore errors here as they result from not matching. @@ -588,12 +589,10 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) if err := os.MkdirAll(yearDir, 0755); err != nil { return err } - //log.Printf("created %s\n", yearDir) yearDirs[year] = yearDir } fname := filepath.Join(yearDir, filename) - //log.Printf("write: %s\n", fname) data := content.Bytes() if err := writeFileHashes( fname, filename, diff --git a/csaf/advisories.go b/csaf/advisories.go index df23935..ef3fea8 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -9,10 +9,10 @@ package csaf import ( + "context" "encoding/csv" "fmt" "io" - "log" "log/slog" "net/http" "net/url" @@ -91,7 +91,7 @@ func (daf DirectoryAdvisoryFile) LogValue() slog.Value { // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { AgeAccept func(time.Time) bool - Log func(format string, args ...any) + Log func(loglevel slog.Level, format string, args ...any) client util.Client expr *util.PathEval doc any @@ -131,8 +131,8 @@ func (afp *AdvisoryFileProcessor) Process( ) error { lg := afp.Log if lg == nil { - lg = func(format string, args ...any) { - log.Printf("AdvisoryFileProcessor.Process: "+format, args...) + lg = func(loglevel slog.Level, format string, args ...any) { + slog.Log(context.Background(), loglevel, "AdvisoryFileProcessor.Process: "+format, args...) 
} } @@ -140,7 +140,7 @@ func (afp *AdvisoryFileProcessor) Process( rolie, err := afp.expr.Eval( "$.distributions[*].rolie.feeds", afp.doc) if err != nil { - lg("rolie check failed: %v\n", err) + lg(slog.LevelError, "rolie check failed", "err", err) return err } @@ -152,7 +152,7 @@ func (afp *AdvisoryFileProcessor) Process( if err := util.ReMarshalJSON(&feeds, rolie); err != nil { return err } - lg("Found %d ROLIE feed(s).\n", len(feeds)) + lg(slog.LevelInfo, "Found ROLIE feed(s)", "length", len(feeds)) for _, feed := range feeds { if err := afp.processROLIE(feed, fn); err != nil { @@ -168,12 +168,12 @@ func (afp *AdvisoryFileProcessor) Process( var dirURLs []string if err != nil { - lg("extracting directory URLs failed: %v\n", err) + lg(slog.LevelError, "extracting directory URLs failed", "err", err) } else { var ok bool dirURLs, ok = util.AsStrings(directoryURLs) if !ok { - lg("directory_urls are not strings.\n") + lg(slog.LevelError, "directory_urls are not strings") } } @@ -209,9 +209,8 @@ func (afp *AdvisoryFileProcessor) Process( // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( baseURL string, - lg func(string, ...any), + lg func(slog.Level, string, ...any), ) ([]AdvisoryFile, error) { - base, err := url.Parse(baseURL) if err != nil { return nil, err @@ -244,12 +243,12 @@ func (afp *AdvisoryFileProcessor) loadChanges( return nil, err } if len(r) < 2 { - lg("%q has not enough columns in line %d", line) + lg(slog.LevelError, "Not enough columns", "line", line) continue } t, err := time.Parse(time.RFC3339, r[timeColumn]) if err != nil { - lg("%q has an invalid time stamp in line %d: %v", changesURL, line, err) + lg(slog.LevelError, "Invalid time stamp in line", "url", changesURL, "line", line, "err", err) continue } // Apply date range filtering. 
@@ -258,7 +257,7 @@ func (afp *AdvisoryFileProcessor) loadChanges( } path := r[pathColumn] if _, err := url.Parse(path); err != nil { - lg("%q contains an invalid URL %q in line %d", changesURL, path, line) + lg(slog.LevelError, "Contains an invalid URL", "url", changesURL, "path", path, "line", line) continue } @@ -279,31 +278,31 @@ func (afp *AdvisoryFileProcessor) processROLIE( } up, err := url.Parse(string(*feed.URL)) if err != nil { - log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err) + slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } feedURL := afp.base.ResolveReference(up) - log.Printf("Feed URL: %s\n", feedURL) + slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) if err != nil { - log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err) + slog.Error("Invalid feed base URL", "url", fb, "err", err) continue } feedBaseURL, err := url.Parse(fb) if err != nil { - log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err) + slog.Error("Cannot parse feed base URL", "url", fb, "err", err) continue } res, err := afp.client.Get(feedURL.String()) if err != nil { - log.Printf("error: Cannot get feed '%s'\n", err) + slog.Error("Cannot get feed", "err", err) continue } if res.StatusCode != http.StatusOK { - log.Printf("error: Fetching %s failed. 
Status code %d (%s)", - feedURL, res.StatusCode, res.Status) + slog.Error("Fetching failed", + "url", feedURL, "status_code", res.StatusCode, "status", res.Status) continue } rfeed, err := func() (*ROLIEFeed, error) { @@ -311,7 +310,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( return LoadROLIEFeed(res.Body) }() if err != nil { - log.Printf("Loading ROLIE feed failed: %v.", err) + slog.Error("Loading ROLIE feed failed", "err", err) continue } @@ -323,7 +322,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( } p, err := url.Parse(u) if err != nil { - log.Printf("error: Invalid URL '%s': %v", u, err) + slog.Error("Invalid URL", "url", u, "err", err) return "" } return feedBaseURL.ResolveReference(p).String() diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index 2161079..8d4ee80 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -5,6 +5,7 @@ web = "/var/csaf_aggregator/html" domain = "https://localhost:9443" rate = 10.0 insecure = true +#verbose = false #openpgp_private_key = #openpgp_public_key = #interim_years = From 0848143a0bbcd83cecf626be7d8379759121de53 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 19 Mar 2025 09:39:07 +0100 Subject: [PATCH 114/117] Update lint (#626) * Update linter * Format * Fix lint --- .github/workflows/go.yml | 6 +++--- cmd/csaf_aggregator/client_test.go | 4 ++-- cmd/csaf_downloader/downloader_test.go | 6 ++---- cmd/csaf_downloader/forwarder.go | 6 +++--- cmd/csaf_provider/main.go | 2 +- internal/options/options_test.go | 9 ++++----- util/file_test.go | 2 +- 7 files changed, 16 insertions(+), 19 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 95ee8c7..b86309f 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -17,7 +17,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 'stable' + go-version: "stable" - name: Build run: go build -v ./cmd/... 
@@ -31,10 +31,10 @@ jobs: gofmt-flags: "-l -d" - name: golint - uses: Jerome1337/golint-action@v1.0.2 + uses: Jerome1337/golint-action@v1.0.3 - name: Revive Action - uses: morphy2k/revive-action@v2.5.1 + uses: morphy2k/revive-action@v2.7.4 - name: Tests run: go test -v ./... diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go index fc5b095..3617ce6 100644 --- a/cmd/csaf_aggregator/client_test.go +++ b/cmd/csaf_aggregator/client_test.go @@ -49,10 +49,10 @@ func Test_downloadJSON(t *testing.T) { test := testToRun t.Run(test.name, func(tt *testing.T) { tt.Parallel() - found := func(r io.Reader) error { + found := func(_ io.Reader) error { return nil } - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Add("Content-Type", test.contentType) w.WriteHeader(test.statusCode) })) diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index d7eaae3..1485ec9 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -24,12 +24,10 @@ import ( func checkIfFileExists(path string, t *testing.T) bool { if _, err := os.Stat(path); err == nil { return true - } else if errors.Is(err, os.ErrNotExist) { - return false - } else { + } else if !errors.Is(err, os.ErrNotExist) { t.Fatalf("Failed to check if file exists: %v", err) - return false } + return false } func TestShaMarking(t *testing.T) { diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 1598283..ac2c336 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -224,12 +224,12 @@ func (f *forwarder) storeFailed(filename, doc, sha256, sha512 string) { // limitedString reads max bytes from reader and returns it as a string. // Longer strings are indicated by "..." as a suffix. 
-func limitedString(r io.Reader, max int) (string, error) { +func limitedString(r io.Reader, maxLength int) (string, error) { var msg strings.Builder - if _, err := io.Copy(&msg, io.LimitReader(r, int64(max))); err != nil { + if _, err := io.Copy(&msg, io.LimitReader(r, int64(maxLength))); err != nil { return "", err } - if msg.Len() >= max { + if msg.Len() >= maxLength { msg.WriteString("...") } return msg.String(), nil diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 6c858c9..3faebfe 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -48,7 +48,7 @@ func main() { cfg, err := loadConfig() if err != nil { - cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) { http.Error(rw, "Something went wrong. Check server logs for more details", http.StatusInternalServerError) })) diff --git a/internal/options/options_test.go b/internal/options/options_test.go index 9aab23b..2768e37 100644 --- a/internal/options/options_test.go +++ b/internal/options/options_test.go @@ -37,10 +37,10 @@ func TestParse(t *testing.T) { }, Usage: "[OPTIONS] domain...", HasVersion: func(cfg *config) bool { return cfg.Version }, - SetDefaults: func(cfg *config) { + SetDefaults: func(_ *config) { }, // Re-establish default values if not set. - EnsureDefaults: func(cfg *config) { + EnsureDefaults: func(_ *config) { }, } @@ -157,7 +157,6 @@ func TestErrorCheck(t *testing.T) { return } t.Fatalf("process ran with err %v, want exit status 1", err) - } // TestSecondPassCommandlineParsing checks if the second pass @@ -168,7 +167,7 @@ func TestSecondPassCommandlineParsing(t *testing.T) { os.Args = []string{"cmd"} p := Parser[config]{ - ConfigLocation: func(cfg *config) string { + ConfigLocation: func(_ *config) string { // This is a bit stupid. 
os.Args = []string{"cmd", "--invalid"} return "data/empty.toml" @@ -188,7 +187,7 @@ func TestSecondPassCommandlineHelp(t *testing.T) { os.Args = []string{"cmd"} p := Parser[config]{ - ConfigLocation: func(cfg *config) string { + ConfigLocation: func(_ *config) string { // This is a bit stupid. os.Args = []string{"cmd", "--help"} return "data/empty.toml" diff --git a/util/file_test.go b/util/file_test.go index 28c5196..ab2a208 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -155,7 +155,7 @@ func TestMakeUniqFile(t *testing.T) { func Test_mkUniq(t *testing.T) { dir := t.TempDir() - name, err := mkUniq(dir+"/", func(name string) error { + name, err := mkUniq(dir+"/", func(_ string) error { return nil }) if err != nil { From 2c5ef1fd5f47a8c9ad34526a5eef64a2c8b28f9f Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 24 Mar 2025 13:32:43 +0100 Subject: [PATCH 115/117] Avoid memory leak Move `resp.Body.Close()` before check of status code. Reported by @mgoetzegb here: https://github.com/gocsaf/csaf/pull/625#issuecomment-2744067770 --- cmd/csaf_downloader/downloader.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index f0778ee..bcef357 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -781,11 +781,11 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, if err != nil { return nil, nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, nil, fmt.Errorf( "fetching signature from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode) } - defer resp.Body.Close() data, err := io.ReadAll(resp.Body) if err != nil { return nil, nil, err @@ -846,11 +846,11 @@ func loadHash(client util.Client, p string) ([]byte, []byte, error) { if err != nil { return nil, nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, nil, fmt.Errorf( "fetching hash from '%s' 
failed: %s (%d)", p, resp.Status, resp.StatusCode) } - defer resp.Body.Close() var data bytes.Buffer tee := io.TeeReader(resp.Body, &data) hash, err := util.HashFromReader(tee) From 2f599ab0175d0d89748f4d539afdc51024332b97 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 2 Apr 2025 17:05:29 +0200 Subject: [PATCH 116/117] Fix aggregator URL handling (#631) * Fix aggregator URL handling Parts of the URL were not path escaped. This results in a wrong URL; if the provider name contains characters that need to be escaped. * Simplify JoinPath usage --- cmd/csaf_aggregator/indices.go | 68 +++++++++++++++++++++----------- cmd/csaf_aggregator/mirror.go | 32 +++++++++------ cmd/csaf_aggregator/processor.go | 13 ++++++ 3 files changed, 78 insertions(+), 35 deletions(-) diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 17c8d3a..976d9a3 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -183,19 +183,26 @@ func (w *worker) writeROLIENoSummaries(label string) error { fname := "csaf-feed-tlp-" + labelFolder + ".json" - feedURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + labelFolder + "/" + fname + feedURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + feedURL = feedURL.JoinPath(labelFolder, fname) links := []csaf.Link{{ Rel: "self", - HRef: feedURL, + HRef: feedURL.String(), }} if w.provider.serviceDocument(w.processor.cfg) { + serviceURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + serviceURL = serviceURL.JoinPath("service.json") links = append(links, csaf.Link{ - Rel: "service", - HRef: w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/service.json", + Rel: "service", + HRef: serviceURL.String(), }) } @@ -223,8 +230,11 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { fname := "csaf-feed-tlp-" + labelFolder + ".json" - feedURL := w.processor.cfg.Domain + 
"/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + labelFolder + "/" + fname + feedURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + feedURL = feedURL.JoinPath(labelFolder, fname) entries := make([]*csaf.Entry, len(summaries)) @@ -236,10 +246,13 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { for i := range summaries { s := &summaries[i] - csafURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + label + "/" + - strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + - s.filename + csafURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + csafURLString := csafURL.JoinPath(label, + strconv.Itoa(s.summary.InitialReleaseDate.Year()), + s.filename).String() entries[i] = &csaf.Entry{ ID: s.summary.ID, @@ -247,15 +260,15 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { Published: csaf.TimeStamp(s.summary.InitialReleaseDate), Updated: csaf.TimeStamp(s.summary.CurrentReleaseDate), Link: []csaf.Link{ - {Rel: "self", HRef: csafURL}, - {Rel: "hash", HRef: csafURL + ".sha256"}, - {Rel: "hash", HRef: csafURL + ".sha512"}, - {Rel: "signature", HRef: csafURL + ".asc"}, + {Rel: "self", HRef: csafURLString}, + {Rel: "hash", HRef: csafURLString + ".sha256"}, + {Rel: "hash", HRef: csafURLString + ".sha512"}, + {Rel: "signature", HRef: csafURLString + ".asc"}, }, Format: format, Content: csaf.Content{ Type: "application/json", - Src: csafURL, + Src: csafURLString, }, } if s.summary.Summary != "" { @@ -267,14 +280,18 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { links := []csaf.Link{{ Rel: "self", - HRef: feedURL, + HRef: feedURL.String(), }} if w.provider.serviceDocument(w.processor.cfg) { + serviceURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + serviceURL = serviceURL.JoinPath("service.json") links = append(links, csaf.Link{ - Rel: "service", - HRef: w.processor.cfg.Domain + 
"/.well-known/csaf-aggregator/" + - w.provider.Name + "/service.json", + Rel: "service", + HRef: serviceURL.String(), }) } @@ -344,12 +361,15 @@ func (w *worker) writeService() error { for _, ts := range labels { feedName := "csaf-feed-tlp-" + ts + ".json" - href := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + ts + "/" + feedName + hrefURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + hrefURL = hrefURL.JoinPath(ts, feedName) collection := csaf.ROLIEServiceWorkspaceCollection{ Title: "CSAF feed (TLP:" + strings.ToUpper(ts) + ")", - HRef: href, + HRef: hrefURL.String(), Categories: categories, } collections = append(collections, collection) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index e7c5154..1ef5881 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -103,9 +103,13 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { } // Add us as a mirror. + mirror, err := w.getProviderBaseURL() + if err != nil { + return nil, err + } mirrorURL := csaf.ProviderURL( - fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/provider-metadata.json", - w.processor.cfg.Domain, w.provider.Name)) + mirror.JoinPath("provider-metadata.json").String(), + ) acp.Mirrors = []csaf.ProviderURL{ mirrorURL, @@ -128,8 +132,12 @@ func (w *worker) writeProviderMetadata() error { fname := filepath.Join(w.dir, "provider-metadata.json") + prefixURL, err := w.getProviderBaseURL() + if err != nil { + return err + } pm := csaf.NewProviderMetadataPrefix( - w.processor.cfg.Domain+"/.well-known/csaf-aggregator/"+w.provider.Name, + prefixURL.String(), w.labelsFromSummaries()) // Fill in directory URLs if needed. 
@@ -139,9 +147,8 @@ func (w *worker) writeProviderMetadata() error { labels = append(labels, label) } sort.Strings(labels) - prefix := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + w.provider.Name + "/" for _, label := range labels { - pm.AddDirectoryDistribution(prefix + label) + pm.AddDirectoryDistribution(prefixURL.JoinPath(label).String()) } } @@ -188,9 +195,12 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { return err } + keyURL, err := w.getProviderBaseURL() + if err != nil { + return err + } localKeyURL := func(fingerprint string) string { - return fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/openpgp/%s.asc", - w.processor.cfg.Domain, w.provider.Name, fingerprint) + return keyURL.JoinPath("openpgp", (fingerprint + ".asc")).String() } for i := range pm.PGPKeys { @@ -240,8 +250,8 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { } // replace the URL - url := localKeyURL(fingerprint) - pgpKey.URL = &url + u := localKeyURL(fingerprint) + pgpKey.URL = &u } // If we have public key configured copy it into the new folder @@ -308,7 +318,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error var ( lastUpdated = csaf.TimeStamp(lastUpdatedT) role = csaf.MetadataRole(roleS) - url = csaf.ProviderURL(urlS) + providerURL = csaf.ProviderURL(urlS) ) return &csaf.AggregatorCSAFProvider{ @@ -316,7 +326,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error LastUpdated: &lastUpdated, Publisher: &pub, Role: &role, - URL: &url, + URL: &providerURL, }, }, nil } diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index b22e839..0d41df8 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -11,6 +11,7 @@ package main import ( "fmt" "log/slog" + "net/url" "os" "path/filepath" @@ -112,6 +113,18 @@ func (w *worker) locateProviderMetadata(domain string) error { return nil } +// getProviderBaseURL returns the 
base URL for the provider. +func (w *worker) getProviderBaseURL() (*url.URL, error) { + baseURL, err := url.Parse(w.processor.cfg.Domain) + if err != nil { + return nil, err + } + baseURL = baseURL.JoinPath(".well-known", + "csaf-aggregator", + w.provider.Name) + return baseURL, nil +} + // removeOrphans removes the directories that are not in the providers list. func (p *processor) removeOrphans() error { From 3ab00e87594ccad74c40534bbad3f4028abdb5f3 Mon Sep 17 00:00:00 2001 From: Christoph Klassen <100708552+cintek@users.noreply.github.com> Date: Wed, 28 May 2025 11:30:46 +0200 Subject: [PATCH 117/117] Remove golint github action We use Revive already which is a replacement for golint and golint isn't maintained anyway. --- .github/workflows/go.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b86309f..6b07bfd 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -30,9 +30,6 @@ jobs: with: gofmt-flags: "-l -d" - - name: golint - uses: Jerome1337/golint-action@v1.0.3 - - name: Revive Action uses: morphy2k/revive-action@v2.7.4