Mirror of https://github.com/gocsaf/csaf.git (synced 2025-12-22 05:40:11 +01:00)
Commit 3e16741ed5
46 changed files with 3835 additions and 98 deletions

cmd/csaf_aggregator/client_test.go (new file, 67 lines)
@@ -0,0 +1,67 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>

package main

import (
    "io"
    "net/http"
    "net/http/httptest"
    "testing"

    "github.com/gocsaf/csaf/v3/util"
)

func Test_downloadJSON(t *testing.T) {
    tests := []struct {
        name        string
        statusCode  int
        contentType string
        wantErr     error
    }{
        {
            name:        "status ok, application/json",
            statusCode:  http.StatusOK,
            contentType: "application/json",
            wantErr:     nil,
        },
        {
            name:        "status found, application/json",
            statusCode:  http.StatusFound,
            contentType: "application/json",
            wantErr:     errNotFound,
        },
        {
            name:        "status ok, application/xml",
            statusCode:  http.StatusOK,
            contentType: "application/xml",
            wantErr:     errNotFound,
        },
    }

    t.Parallel()
    for _, testToRun := range tests {
        test := testToRun
        t.Run(test.name, func(tt *testing.T) {
            tt.Parallel()
            found := func(r io.Reader) error {
                return nil
            }
            server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
                w.Header().Add("Content-Type", test.contentType)
                w.WriteHeader(test.statusCode)
            }))
            defer server.Close()
            hClient := http.Client{}
            client := util.Client(&hClient)
            if gotErr := downloadJSON(client, server.URL, found); gotErr != test.wantErr {
                t.Errorf("downloadJSON: Expected %q but got %q.", test.wantErr, gotErr)
            }
        })
    }
}

@@ -10,8 +10,12 @@ package main
 
 import (
     "fmt"
+    "net/http"
+    "net/http/httptest"
     "strings"
     "testing"
+
+    "github.com/gocsaf/csaf/v3/util"
 )
 
 const page0 = `<html>
@@ -31,7 +35,6 @@ const page0 = `<html>
 </html>`
 
 func TestLinksOnPage(t *testing.T) {
-
     var links []string
 
     err := linksOnPage(
@@ -58,3 +61,78 @@ func TestLinksOnPage(t *testing.T) {
         }
     }
 }
+
+func Test_listed(t *testing.T) {
+    tests := []struct {
+        name    string
+        badDirs util.Set[string]
+        path    string
+        want    bool
+    }{
+        {
+            name:    "listed path",
+            badDirs: util.Set[string]{},
+            path:    "/white/avendor-advisory-0004.json",
+            want:    true,
+        },
+        {
+            name:    "badDirs contains path",
+            badDirs: util.Set[string]{"/white/": {}},
+            path:    "/white/avendor-advisory-0004.json",
+            want:    false,
+        },
+        {
+            name:    "not found",
+            badDirs: util.Set[string]{},
+            path:    "/not-found/resource.json",
+            want:    false,
+        },
+        {
+            name:    "badDirs does not contain path",
+            badDirs: util.Set[string]{"/bad-dir/": {}},
+            path:    "/white/avendor-advisory-0004.json",
+            want:    true,
+        },
+        {
+            name:    "unlisted path",
+            badDirs: util.Set[string]{},
+            path:    "/white/avendor-advisory-0004-not-listed.json",
+            want:    false,
+        },
+    }
+
+    t.Parallel()
+    for _, testToRun := range tests {
+        test := testToRun
+        t.Run(test.name, func(tt *testing.T) {
+            tt.Parallel()
+            serverURL := ""
+            fs := http.FileServer(http.Dir("../../testdata/simple-directory-provider"))
+            server := httptest.NewTLSServer(fs)
+            defer server.Close()
+
+            serverURL = server.URL
+
+            hClient := server.Client()
+            client := util.Client(hClient)
+
+            pgs := pages{}
+            cfg := config{RemoteValidator: "", RemoteValidatorCache: ""}
+            p, err := newProcessor(&cfg)
+            if err != nil {
+                t.Error(err)
+            }
+            p.client = client
+
+            badDirs := util.Set[string]{}
+            for dir := range test.badDirs {
+                badDirs.Add(serverURL + dir)
+            }
+
+            got, _ := pgs.listed(serverURL+test.path, p, badDirs)
+            if got != test.want {
+                t.Errorf("%q: Expected %t but got %t.", test.name, test.want, got)
+            }
+        })
+    }
+}

@@ -83,10 +83,8 @@ type reporter interface {
     report(*processor, *Domain)
 }
 
-var (
-    // errContinue indicates that the current check should continue.
-    errContinue = errors.New("continue")
-)
+// errContinue indicates that the current check should continue.
+var errContinue = errors.New("continue")
 
 type whereType byte
 
@@ -138,7 +136,7 @@ func (m *topicMessages) info(format string, args ...any) {
     m.add(InfoType, format, args...)
 }
 
-// use signals that we going to use this topic.
+// use signals that we're going to use this topic.
 func (m *topicMessages) use() {
     if *m == nil {
         *m = []Message{}
@@ -164,9 +162,8 @@ func (m *topicMessages) hasErrors() bool {
     return false
 }
 
-// newProcessor returns an initilaized processor.
+// newProcessor returns an initialized processor.
 func newProcessor(cfg *config) (*processor, error) {
-
     var validator csaf.RemoteValidator
 
     if cfg.RemoteValidator != "" {
@@ -239,7 +236,6 @@ func (p *processor) reset() {
 // Then it calls the report method on each report from the given "reporters" parameter for each domain.
 // It returns a pointer to the report and nil, otherwise an error.
 func (p *processor) run(domains []string) (*Report, error) {
-
     report := Report{
         Date:    ReportTime{Time: time.Now().UTC()},
         Version: util.SemVersion,
@@ -296,7 +292,6 @@ func (p *processor) run(domains []string) (*Report, error) {
 
 // fillMeta fills the report with extra informations from provider metadata.
 func (p *processor) fillMeta(domain *Domain) error {
-
     if p.pmd == nil {
         return nil
     }
@@ -322,7 +317,6 @@ func (p *processor) fillMeta(domain *Domain) error {
 // domainChecks compiles a list of checks which should be performed
 // for a given domain.
 func (p *processor) domainChecks(domain string) []func(*processor, string) error {
-
     // If we have a direct domain url we dont need to
     // perform certain checks.
     direct := strings.HasPrefix(domain, "https://")
@@ -392,7 +386,6 @@ func (p *processor) markChecked(s string, mask whereType) bool {
 }
 
 func (p *processor) checkRedirect(r *http.Request, via []*http.Request) error {
-
     url := r.URL.String()
     p.checkTLS(url)
     if p.redirects == nil {
@@ -494,7 +487,6 @@ func (p *processor) usedAuthorizedClient() bool {
 
 // rolieFeedEntries loads the references to the advisory files for a given feed.
 func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) {
-
     client := p.httpClient()
     res, err := client.Get(feed)
     p.badDirListings.use()
@@ -545,7 +537,6 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) {
     var files []csaf.AdvisoryFile
 
     rfeed.Entries(func(entry *csaf.Entry) {
-
         // Filter if we have date checking.
         if accept := p.cfg.Range; accept != nil {
             if t := time.Time(entry.Updated); !t.IsZero() && !accept.Contains(t) {
@@ -594,11 +585,17 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) {
 
         var file csaf.AdvisoryFile
 
-        if sha256 != "" || sha512 != "" || sign != "" {
-            file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign}
-        } else {
-            file = csaf.PlainAdvisoryFile(url)
+        switch {
+        case sha256 == "" && sha512 != "":
+            p.badROLIEFeed.info("%s has no sha256 hash file listed", url)
+        case sha256 != "" && sha512 == "":
+            p.badROLIEFeed.info("%s has no sha512 hash file listed", url)
+        case sha256 == "" && sha512 == "":
+            p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url)
+        case sign == "":
+            p.badROLIEFeed.error("No signature listed on ROLIE feed %s", url)
         }
+        file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign}
 
         files = append(files, file)
     })
@@ -753,14 +750,23 @@ func (p *processor) integrity(
     // Check hashes
     p.badIntegrities.use()
 
-    for _, x := range []struct {
+    type hash struct {
         ext  string
         url  func() string
         hash []byte
-    }{
-        {"SHA256", f.SHA256URL, s256.Sum(nil)},
-        {"SHA512", f.SHA512URL, s512.Sum(nil)},
-    } {
+    }
+    hashes := []hash{}
+    if f.SHA256URL() != "" {
+        hashes = append(hashes, hash{"SHA256", f.SHA256URL, s256.Sum(nil)})
+    }
+    if f.SHA512URL() != "" {
+        hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)})
+    }
+
+    couldFetchHash := false
+    hashFetchErrors := []string{}
+
+    for _, x := range hashes {
         hu, err := url.Parse(x.url())
         if err != nil {
             lg(ErrorType, "Bad URL %s: %v", x.url(), err)
@@ -771,14 +777,15 @@ func (p *processor) integrity(
 
         p.checkTLS(hashFile)
         if res, err = client.Get(hashFile); err != nil {
-            p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err)
+            hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: %v.", hashFile, err))
             continue
         }
         if res.StatusCode != http.StatusOK {
-            p.badIntegrities.error("Fetching %s failed: Status code %d (%s)",
-                hashFile, res.StatusCode, res.Status)
+            hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: Status code %d (%s)",
+                hashFile, res.StatusCode, res.Status))
             continue
         }
+        couldFetchHash = true
         h, err := func() ([]byte, error) {
             defer res.Body.Close()
             return util.HashFromReader(res.Body)
@@ -796,6 +803,19 @@ func (p *processor) integrity(
                 x.ext, u, hashFile)
         }
     }
+
+    msgType := ErrorType
+    // Log only as warning, if the other hash could be fetched
+    if couldFetchHash {
+        msgType = WarnType
+    }
+    if f.IsDirectory() {
+        msgType = InfoType
+    }
+    for _, fetchError := range hashFetchErrors {
+        p.badIntegrities.add(msgType, fetchError)
+    }
+
     // Check signature
     su, err := url.Parse(f.SignURL())
     if err != nil {
@@ -888,7 +908,8 @@ func (p *processor) checkIndex(base string, mask whereType) error {
                 p.badIntegrities.error("index.txt contains invalid URL %q in line %d", u, line)
                 continue
             }
-            files = append(files, csaf.PlainAdvisoryFile(u))
+
+            files = append(files, csaf.DirectoryAdvisoryFile{Path: u})
         }
         return files, scanner.Err()
     }()
@@ -911,7 +932,6 @@ func (p *processor) checkIndex(base string, mask whereType) error {
 // of the fields' values and if they are sorted properly. Then it passes the files to the
 // "integrity" functions. It returns error if some test fails, otherwise nil.
 func (p *processor) checkChanges(base string, mask whereType) error {
-
     bu, err := url.Parse(base)
     if err != nil {
         return err
@@ -970,9 +990,9 @@ func (p *processor) checkChanges(base string, mask whereType) error {
                 continue
             }
             path := r[pathColumn]
-            times, files =
-                append(times, t),
-                append(files, csaf.PlainAdvisoryFile(path))
+
+            times, files = append(times, t),
+                append(files, csaf.DirectoryAdvisoryFile{Path: path})
         }
         return times, files, nil
     }()
@@ -1144,7 +1164,6 @@ func (p *processor) checkMissing(string) error {
 // checkInvalid goes over all found adivisories URLs and checks
 // if file name conforms to standard.
 func (p *processor) checkInvalid(string) error {
-
     p.badDirListings.use()
     var invalids []string
 
@@ -1166,7 +1185,6 @@ func (p *processor) checkInvalid(string) error {
 // checkListing goes over all found adivisories URLs and checks
 // if their parent directory is listable.
 func (p *processor) checkListing(string) error {
-
     p.badDirListings.use()
 
     pgs := pages{}
@@ -1201,7 +1219,6 @@ func (p *processor) checkListing(string) error {
 // checkWhitePermissions checks if the TLP:WHITE advisories are
 // available with unprotected access.
 func (p *processor) checkWhitePermissions(string) error {
-
     var ids []string
     for id, open := range p.labelChecker.whiteAdvisories {
         if !open {
@@ -1227,7 +1244,6 @@ func (p *processor) checkWhitePermissions(string) error {
 // According to the result, the respective error messages added to
 // badProviderMetadata.
 func (p *processor) checkProviderMetadata(domain string) bool {
-
     p.badProviderMetadata.use()
 
     client := p.httpClient()
@@ -1274,7 +1290,6 @@ func (p *processor) checkSecurity(domain string, legacy bool) (int, string) {
 
 // checkSecurityFolder checks the security.txt in a given folder.
 func (p *processor) checkSecurityFolder(folder string) string {
-
     client := p.httpClient()
     path := folder + "security.txt"
     res, err := client.Get(path)
@@ -1340,9 +1355,7 @@ func (p *processor) checkSecurityFolder(folder string) string {
 // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available
 // and serves the "provider-metadata.json".
 func (p *processor) checkDNS(domain string) {
-
     p.badDNSPath.use()
-
     client := p.httpClient()
     path := "https://csaf.data.security." + domain
     res, err := client.Get(path)
@@ -1352,9 +1365,8 @@ func (p *processor) checkDNS(domain string) {
         return
     }
     if res.StatusCode != http.StatusOK {
-        p.badDNSPath.add(ErrorType,
-            fmt.Sprintf("Fetching %s failed. Status code %d (%s)",
-                path, res.StatusCode, res.Status))
+        p.badDNSPath.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)",
+            path, res.StatusCode, res.Status))
     }
     hash := sha256.New()
     defer res.Body.Close()
@@ -1376,7 +1388,6 @@ func (p *processor) checkDNS(domain string) {
 func (p *processor) checkWellknown(domain string) {
-
     p.badWellknownMetadata.use()
 
     client := p.httpClient()
     path := "https://" + domain + "/.well-known/csaf/provider-metadata.json"
 
@@ -1405,9 +1416,7 @@ func (p *processor) checkWellknown(domain string) {
 // for the legacy location will be made. If this fails as well, then an
 // error is given.
 func (p *processor) checkWellknownSecurityDNS(domain string) error {
-
     p.checkWellknown(domain)
-
     // Security check for well known (default) and legacy location
     warnings, sDMessage := p.checkSecurity(domain, false)
     // if the security.txt under .well-known was not okay
@@ -1445,7 +1454,6 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error {
 // As a result of these checks respective error messages are passed
 // to badPGP methods. It returns nil if all checks are passed.
 func (p *processor) checkPGPKeys(_ string) error {
-
     p.badPGPs.use()
 
     src, err := p.expr.Eval("$.public_openpgp_keys", p.pmd)
@@ -1504,7 +1512,6 @@ func (p *processor) checkPGPKeys(_ string) error {
             defer res.Body.Close()
             return crypto.NewKeyFromArmoredReader(res.Body)
         }()
-
         if err != nil {
             p.badPGPs.error("Reading public OpenPGP key %s failed: %v", u, err)
             continue
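
The integrity changes above first collect hash-fetch failures and only report them afterwards, picking the severity from the overall outcome: errors by default, warnings when the other hash could be fetched, infos for directory providers. A minimal, self-contained sketch of that severity selection (not part of the diff; the pickSeverity helper and the main function are illustrative, the message type names merely mirror the ones used above):

package main

import "fmt"

type MessageType int

const (
    ErrorType MessageType = iota
    WarnType
    InfoType
)

// pickSeverity mirrors the selection in integrity(): start with an error,
// downgrade to a warning if at least one hash file could be fetched,
// and to an info for directory based providers.
func pickSeverity(couldFetchHash, isDirectory bool) MessageType {
    msgType := ErrorType
    if couldFetchHash {
        msgType = WarnType
    }
    if isDirectory {
        msgType = InfoType
    }
    return msgType
}

func main() {
    fmt.Println(pickSeverity(false, false)) // 0 (ErrorType)
    fmt.Println(pickSeverity(true, false))  // 1 (WarnType)
    fmt.Println(pickSeverity(true, true))   // 2 (InfoType)
}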

cmd/csaf_checker/processor_test.go (new file, 206 lines)
@@ -0,0 +1,206 @@
// This file is Free Software under the Apache-2.0 License
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
//
// SPDX-License-Identifier: Apache-2.0
//
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>

package main

import (
    "bytes"
    "encoding/json"
    "net/http/httptest"
    "os"
    "reflect"
    "testing"
    "text/template"

    "github.com/gocsaf/csaf/v3/internal/testutil"
    "github.com/gocsaf/csaf/v3/util"
)

func getRequirementTestData(t *testing.T, params testutil.ProviderParams, directoryProvider bool) []Requirement {
    path := "../../testdata/processor-requirements/"
    if params.EnableSha256 {
        path += "sha256-"
    }
    if params.EnableSha512 {
        path += "sha512-"
    }
    if params.ForbidSha256 {
        path += "forbid-sha256-"
    }
    if params.ForbidSha512 {
        path += "forbid-sha512-"
    }
    if directoryProvider {
        path += "directory"
    } else {
        path += "rolie"
    }
    path += ".json"

    content, err := os.ReadFile(path)
    if err != nil {
        t.Fatal(err)
    }

    tmplt, err := template.New("base").Parse(string(content))
    if err != nil {
        t.Fatal(err)
    }

    var output bytes.Buffer
    err = tmplt.Execute(&output, params)
    if err != nil {
        t.Fatal(err)
    }
    var requirement []Requirement
    err = json.Unmarshal(output.Bytes(), &requirement)
    if err != nil {
        t.Fatal(err)
    }
    return requirement
}

func TestShaMarking(t *testing.T) {
    tests := []struct {
        name              string
        directoryProvider bool
        enableSha256      bool
        enableSha512      bool
        forbidSha256      bool
        forbidSha512      bool
    }{
        {
            name:              "deliver sha256 and sha512",
            directoryProvider: false,
            enableSha256:      true,
            enableSha512:      true,
        },
        {
            name:              "enable sha256 and sha512, forbid fetching",
            directoryProvider: false,
            enableSha256:      true,
            enableSha512:      true,
            forbidSha256:      true,
            forbidSha512:      true,
        },
        {
            name:              "enable sha256 and sha512, forbid sha256",
            directoryProvider: false,
            enableSha256:      true,
            enableSha512:      true,
            forbidSha256:      true,
            forbidSha512:      false,
        },
        {
            name:              "enable sha256 and sha512, forbid sha512",
            directoryProvider: false,
            enableSha256:      true,
            enableSha512:      true,
            forbidSha256:      false,
            forbidSha512:      true,
        },
        {
            name:              "only deliver sha256",
            directoryProvider: false,
            enableSha256:      true,
            enableSha512:      false,
        },
        {
            name:              "only deliver sha512",
            directoryProvider: false,
            enableSha256:      false,
            enableSha512:      true,
        },
        {
            name:              "deliver sha256 and sha512, directory provider",
            directoryProvider: true,
            enableSha256:      true,
            enableSha512:      true,
        },
        {
            name:              "only deliver sha256, directory provider",
            directoryProvider: true,
            enableSha256:      true,
            enableSha512:      false,
        },
        {
            name:              "only deliver sha512, directory provider",
            directoryProvider: true,
            enableSha256:      false,
            enableSha512:      true,
        },
        {
            name:              "no hash",
            directoryProvider: false,
            enableSha256:      false,
            enableSha512:      false,
        },
        {
            name:              "no hash, directory provider",
            directoryProvider: true,
            enableSha256:      false,
            enableSha512:      false,
        },
    }

    t.Parallel()
    for _, testToRun := range tests {
        test := testToRun
        t.Run(test.name, func(tt *testing.T) {
            tt.Parallel()
            serverURL := ""
            params := testutil.ProviderParams{
                URL:          "",
                EnableSha256: test.enableSha256,
                EnableSha512: test.enableSha512,
                ForbidSha256: test.forbidSha256,
                ForbidSha512: test.forbidSha512,
            }
            server := httptest.NewTLSServer(testutil.ProviderHandler(&params, test.directoryProvider))
            defer server.Close()

            serverURL = server.URL
            params.URL = server.URL

            hClient := server.Client()
            client := util.Client(hClient)

            cfg := config{}
            err := cfg.prepare()
            if err != nil {
                t.Fatalf("SHA marking config failed: %v", err)
            }
            p, err := newProcessor(&cfg)
            if err != nil {
                t.Fatalf("could not init downloader: %v", err)
            }
            p.client = client

            report, err := p.run([]string{serverURL + "/provider-metadata.json"})
            if err != nil {
                t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err)
            }
            expected := getRequirementTestData(t,
                testutil.ProviderParams{
                    URL:          serverURL,
                    EnableSha256: test.enableSha256,
                    EnableSha512: test.enableSha512,
                    ForbidSha256: test.forbidSha256,
                    ForbidSha512: test.forbidSha512,
                },
                test.directoryProvider)
            for i, got := range report.Domains[0].Requirements {
                if !reflect.DeepEqual(expected[i], *got) {
                    t.Errorf("SHA marking %v: Expected %v, got %v", test.name, expected[i], *got)
                }
            }

            p.close()
        })
    }
}

@@ -41,6 +41,13 @@ const (
     validationUnsafe = validationMode("unsafe")
 )
 
+type hashAlgorithm string
+
+const (
+    algSha256 = hashAlgorithm("sha256")
+    algSha512 = hashAlgorithm("sha512")
+)
+
 type config struct {
     Directory string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"`
     Insecure  bool   `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"`
@@ -79,6 +86,9 @@ type config struct {
 
     clientCerts   []tls.Certificate
     ignorePattern filter.PatternMatcher
+
+    //lint:ignore SA5008 We are using choice or than once: sha256, sha512
+    PreferredHash hashAlgorithm `long:"preferred_hash" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"`
 }
 
 // configPaths are the potential file locations of the config file.
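
A minimal sketch of how the new preferred_hash option might be selected; the TOML key, the long flag name and the sha256/sha512 choices come from the struct tag above, while the config file name, domain and exact command line are only illustrative:

# config.toml (illustrative)
preferred_hash = "sha512"

$ csaf_downloader --preferred_hash sha512 example.com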
@@ -220,7 +230,7 @@ func (cfg *config) prepareLogging() error {
         w = f
     }
     ho := slog.HandlerOptions{
-        //AddSource: true,
+        // AddSource: true,
         Level:       cfg.LogLevel.Level,
         ReplaceAttr: dropSubSeconds,
     }

@@ -25,6 +25,7 @@ import (
     "os"
     "path"
     "path/filepath"
+    "slices"
     "strconv"
     "strings"
     "sync"
@@ -37,8 +38,16 @@ import (
     "github.com/gocsaf/csaf/v3/util"
 )
 
+type hashFetchInfo struct {
+    url       string
+    preferred bool
+    warn      bool
+    hashType  hashAlgorithm
+}
+
 type downloader struct {
     cfg       *config
+    client    *util.Client // Used for testing
     keys      *crypto.KeyRing
     validator csaf.RemoteValidator
     forwarder *forwarder
@@ -53,7 +62,6 @@ type downloader struct {
 const failedValidationDir = "failed_validation"
 
 func newDownloader(cfg *config) (*downloader, error) {
-
     var validator csaf.RemoteValidator
 
     if cfg.RemoteValidator != "" {
@@ -103,7 +111,6 @@ func logRedirect(req *http.Request, via []*http.Request) error {
 }
 
 func (d *downloader) httpClient() util.Client {
-
     hClient := http.Client{}
 
     if d.cfg.verbose() {
@@ -126,6 +133,11 @@ func (d *downloader) httpClient() util.Client {
 
     client := util.Client(&hClient)
 
+    // Overwrite for testing purposes
+    if d.client != nil {
+        client = *d.client
+    }
+
     // Add extra headers.
     client = &util.HeaderClient{
         Client: client,
@@ -252,7 +264,6 @@ func (d *downloader) downloadFiles(
     label csaf.TLPLabel,
     files []csaf.AdvisoryFile,
 ) error {
-
     var (
         advisoryCh = make(chan csaf.AdvisoryFile)
         errorCh    = make(chan error)
@@ -302,7 +313,6 @@ func (d *downloader) loadOpenPGPKeys(
     base *url.URL,
     expr *util.PathEval,
 ) error {
-
     src, err := expr.Eval("$.public_openpgp_keys", doc)
     if err != nil {
         // no keys.
@@ -356,7 +366,6 @@ func (d *downloader) loadOpenPGPKeys(
             defer res.Body.Close()
             return crypto.NewKeyFromArmoredReader(res.Body)
         }()
-
         if err != nil {
             slog.Warn(
                 "Reading public OpenPGP key failed",
@@ -500,24 +509,42 @@ nextAdvisory:
             signData []byte
         )
 
-        // Only hash when we have a remote counter part we can compare it with.
-        if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil {
-            slog.Warn("Cannot fetch SHA256",
-                "url", file.SHA256URL(),
-                "error", err)
+        hashToFetch := []hashFetchInfo{}
+        if file.SHA512URL() != "" {
+            hashToFetch = append(hashToFetch, hashFetchInfo{
+                url:       file.SHA512URL(),
+                warn:      true,
+                hashType:  algSha512,
+                preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)),
+            })
         } else {
-            s256 = sha256.New()
-            writers = append(writers, s256)
+            slog.Info("SHA512 not present")
         }
+        if file.SHA256URL() != "" {
+            hashToFetch = append(hashToFetch, hashFetchInfo{
+                url:       file.SHA256URL(),
+                warn:      true,
+                hashType:  algSha256,
+                preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)),
+            })
+        } else {
+            slog.Info("SHA256 not present")
+        }
+        if file.IsDirectory() {
+            for i := range hashToFetch {
+                hashToFetch[i].warn = false
+            }
+        }
 
-        if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil {
-            slog.Warn("Cannot fetch SHA512",
-                "url", file.SHA512URL(),
-                "error", err)
-        } else {
+        remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch)
+        if remoteSHA512 != nil {
             s512 = sha512.New()
             writers = append(writers, s512)
         }
+        if remoteSHA256 != nil {
+            s256 = sha256.New()
+            writers = append(writers, s256)
+        }
 
         // Remember the data as we need to store it to file later.
         data.Reset()
@@ -747,6 +774,50 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte,
     return sign, data, nil
 }
 
+func loadHashes(client util.Client, hashes []hashFetchInfo) ([]byte, []byte, []byte, []byte) {
+    var remoteSha256, remoteSha512, sha256Data, sha512Data []byte
+
+    // Load preferred hashes first
+    slices.SortStableFunc(hashes, func(a, b hashFetchInfo) int {
+        if a.preferred == b.preferred {
+            return 0
+        }
+        if a.preferred && !b.preferred {
+            return -1
+        }
+        return 1
+    })
+    for _, h := range hashes {
+        if remote, data, err := loadHash(client, h.url); err != nil {
+            if h.warn {
+                slog.Warn("Cannot fetch hash",
+                    "hash", h.hashType,
+                    "url", h.url,
+                    "error", err)
+            } else {
+                slog.Info("Hash not present", "hash", h.hashType, "file", h.url)
+            }
+        } else {
+            switch h.hashType {
+            case algSha512:
+                {
+                    remoteSha512 = remote
+                    sha512Data = data
+                }
+            case algSha256:
+                {
+                    remoteSha256 = remote
+                    sha256Data = data
+                }
+            }
+            if h.preferred {
+                break
+            }
+        }
+    }
+    return remoteSha256, sha256Data, remoteSha512, sha512Data
+}
+
 func loadHash(client util.Client, p string) ([]byte, []byte, error) {
     resp, err := client.Get(p)
     if err != nil {
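
The ordering in loadHashes above relies on a stable sort that moves preferred entries to the front, combined with the early break once a preferred hash has been fetched, so the non-preferred hash acts only as a fallback. A small self-contained sketch of that comparator outside the downloader (not part of the diff; the URLs are illustrative):

package main

import (
    "fmt"
    "slices"
)

type hashFetchInfo struct {
    url       string
    preferred bool
}

func main() {
    hashes := []hashFetchInfo{
        {url: "advisory.json.sha256", preferred: false},
        {url: "advisory.json.sha512", preferred: true},
    }
    // Same comparator as in loadHashes: preferred entries first,
    // otherwise keep the original (stable) order.
    slices.SortStableFunc(hashes, func(a, b hashFetchInfo) int {
        if a.preferred == b.preferred {
            return 0
        }
        if a.preferred && !b.preferred {
            return -1
        }
        return 1
    })
    fmt.Println(hashes[0].url) // advisory.json.sha512 is tried first
}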

cmd/csaf_downloader/downloader_test.go (new file, 162 lines)
@@ -0,0 +1,162 @@
// This file is Free Software under the Apache-2.0 License
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
//
// SPDX-License-Identifier: Apache-2.0
//
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>

package main

import (
    "context"
    "errors"
    "log/slog"
    "net/http/httptest"
    "os"
    "testing"

    "github.com/gocsaf/csaf/v3/internal/options"
    "github.com/gocsaf/csaf/v3/internal/testutil"
    "github.com/gocsaf/csaf/v3/util"
)

func checkIfFileExists(path string, t *testing.T) bool {
    if _, err := os.Stat(path); err == nil {
        return true
    } else if errors.Is(err, os.ErrNotExist) {
        return false
    } else {
        t.Fatalf("Failed to check if file exists: %v", err)
        return false
    }
}

func TestShaMarking(t *testing.T) {
    tests := []struct {
        name              string
        directoryProvider bool
        wantSha256        bool
        wantSha512        bool
        enableSha256      bool
        enableSha512      bool
        preferredHash     hashAlgorithm
    }{
        {
            name:              "want sha256 and sha512",
            directoryProvider: false,
            wantSha256:        true,
            wantSha512:        true,
            enableSha256:      true,
            enableSha512:      true,
        },
        {
            name:              "only want sha256",
            directoryProvider: false,
            wantSha256:        true,
            wantSha512:        false,
            enableSha256:      true,
            enableSha512:      true,
            preferredHash:     algSha256,
        },
        {
            name:              "only want sha512",
            directoryProvider: false,
            wantSha256:        false,
            wantSha512:        true,
            enableSha256:      true,
            enableSha512:      true,
            preferredHash:     algSha512,
        },
        {
            name:              "only want sha512",
            directoryProvider: false,
            wantSha256:        false,
            wantSha512:        true,
            enableSha256:      true,
            enableSha512:      true,
            preferredHash:     algSha512,
        },

        {
            name:              "only deliver sha256",
            directoryProvider: false,
            wantSha256:        true,
            wantSha512:        false,
            enableSha256:      true,
            enableSha512:      false,
            preferredHash:     algSha512,
        },
        {
            name:              "only want sha256, directory provider",
            directoryProvider: true,
            wantSha256:        true,
            wantSha512:        false,
            enableSha256:      true,
            enableSha512:      true,
            preferredHash:     algSha256,
        },
        {
            name:              "only want sha512, directory provider",
            directoryProvider: true,
            wantSha256:        false,
            wantSha512:        true,
            enableSha256:      true,
            enableSha512:      true,
            preferredHash:     algSha512,
        },
    }

    t.Parallel()
    for _, testToRun := range tests {
        test := testToRun
        t.Run(test.name, func(tt *testing.T) {
            tt.Parallel()
            serverURL := ""
            params := testutil.ProviderParams{
                URL:          "",
                EnableSha256: test.enableSha256,
                EnableSha512: test.enableSha512,
            }
            server := httptest.NewTLSServer(testutil.ProviderHandler(&params, test.directoryProvider))
            defer server.Close()

            serverURL = server.URL
            params.URL = server.URL

            hClient := server.Client()
            client := util.Client(hClient)

            tempDir := t.TempDir()
            cfg := config{LogLevel: &options.LogLevel{Level: slog.LevelDebug}, Directory: tempDir, PreferredHash: test.preferredHash}
            err := cfg.prepare()
            if err != nil {
                t.Fatalf("SHA marking config failed: %v", err)
            }
            d, err := newDownloader(&cfg)
            if err != nil {
                t.Fatalf("could not init downloader: %v", err)
            }
            d.client = &client

            ctx := context.Background()
            err = d.run(ctx, []string{serverURL + "/provider-metadata.json"})
            if err != nil {
                t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err)
            }
            d.close()

            // Check for downloaded hashes
            sha256Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha256", t)
            sha512Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha512", t)

            if sha256Exists != test.wantSha256 {
                t.Errorf("%v: expected sha256 hash present to be %v, got: %v", test.name, test.wantSha256, sha256Exists)
            }

            if sha512Exists != test.wantSha512 {
                t.Errorf("%v: expected sha512 hash present to be %v, got: %v", test.name, test.wantSha512, sha512Exists)
            }
        })
    }
}