Mirror of https://github.com/gocsaf/csaf.git, synced 2025-12-22 18:15:42 +01:00

Commit: Split code into more files; mainly move the processor code into its own file.

parent b5c5d8db4b
commit f2dd5a89a7

3 changed files with 453 additions and 421 deletions
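
The commit only moves code, so the control flow is unchanged: the rest of cmd/csaf_checker builds a processor from the parsed options and hands it the configured checks and the domains to test. A minimal sketch of that wiring follows, with a hypothetical runChecker helper; options, check, Report, newProcessor and run are the identifiers visible in the diff below, everything else in cmd/csaf_checker is outside this commit:

// runChecker is a hypothetical helper illustrating how the moved
// processor code is driven; it is not part of this commit.
func runChecker(opts *options, checks []check, domains []string) (*Report, error) {
	p := newProcessor(opts)       // shared state for all checks of one run
	return p.run(checks, domains) // runs the checks per domain and builds the report
}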
@@ -11,433 +11,20 @@ package main
import (
	"bufio"
	"bytes"
	"context"
	"crypto/sha256"
	"crypto/sha512"
	"crypto/tls"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"hash"
	"io"
	"net/http"
	"net/url"
	"regexp"
	"sort"
	"strings"

	"github.com/PaesslerAG/gval"
	"github.com/PaesslerAG/jsonpath"
	"github.com/ProtonMail/gopenpgp/v2/crypto"
	"github.com/csaf-poc/csaf_distribution/csaf"
	"github.com/csaf-poc/csaf_distribution/util"
)

type processor struct {
	opts           *options
	redirects      map[string]string
	noneTLS        map[string]struct{}
	alreadyChecked map[string]struct{}
	pmd256         []byte
	pmd            interface{}
	builder        gval.Language
	keys           []*crypto.KeyRing
	badHashes      []string
	badSignatures  []string
}

type check interface {
	executionOrder() int
	run(*processor, string) error
	report(*processor, *Domain)
}

func newProcessor(opts *options) *processor {
	return &processor{
		opts:           opts,
		redirects:      map[string]string{},
		noneTLS:        map[string]struct{}{},
		alreadyChecked: map[string]struct{}{},
		builder:        gval.Full(jsonpath.Language()),
	}
}

func (p *processor) clean() {
	for k := range p.redirects {
		delete(p.redirects, k)
	}
	for k := range p.noneTLS {
		delete(p.noneTLS, k)
	}
	for k := range p.alreadyChecked {
		delete(p.alreadyChecked, k)
	}
	p.pmd256 = nil
	p.pmd = nil
	p.keys = nil
	p.badSignatures = nil
	p.badHashes = nil
}

func (p *processor) run(checks []check, domains []string) (*Report, error) {

	var report Report

	execs := make([]check, len(checks))
	copy(execs, checks)
	sort.SliceStable(execs, func(i, j int) bool {
		return execs[i].executionOrder() < execs[j].executionOrder()
	})

	for _, d := range domains {
		for _, ch := range execs {
			if err := ch.run(p, d); err != nil {
				return nil, err
			}
		}
		domain := &Domain{Name: d}
		for _, ch := range checks {
			ch.report(p, domain)
		}
		report.Domains = append(report.Domains, domain)
		p.clean()
	}

	return &report, nil
}

func (p *processor) jsonPath(expr string) (interface{}, error) {
	if p.pmd == nil {
		return nil, errors.New("no provider metadata loaded")
	}
	eval, err := p.builder.NewEvaluable(expr)
	if err != nil {
		return nil, err
	}
	return eval(context.Background(), p.pmd)
}

func (p *processor) checkTLS(u string) {
	if x, err := url.Parse(u); err == nil && x.Scheme != "https" {
		p.noneTLS[u] = struct{}{}
	}
}

func (p *processor) markChecked(s string) bool {
	if _, ok := p.alreadyChecked[s]; ok {
		return true
	}
	p.alreadyChecked[s] = struct{}{}
	return false
}

func (p *processor) checkRedirect(r *http.Request, via []*http.Request) error {

	var path strings.Builder
	for i, v := range via {
		if i > 0 {
			path.WriteString(", ")
		}
		path.WriteString(v.URL.String())
	}
	url := r.URL.String()
	p.checkTLS(url)
	p.redirects[url] = path.String()

	if len(via) > 10 {
		return errors.New("Too many redirections")
	}
	return nil
}

func (p *processor) httpClient() *http.Client {
	client := http.Client{
		CheckRedirect: p.checkRedirect,
	}

	if p.opts.Insecure {
		client.Transport = &http.Transport{
			TLSClientConfig: &tls.Config{
				InsecureSkipVerify: true,
			},
		}
	}

	return &client
}

func (p *processor) addBadHash(format string, args ...interface{}) {
	p.badHashes = append(p.badHashes, fmt.Sprintf(format, args...))
}

func (p *processor) addBadSignature(format string, args ...interface{}) {
	p.badSignatures = append(p.badSignatures, fmt.Sprintf(format, args...))
}

func (p *processor) integrity(
	files []string,
	base string,
	lg func(string, ...interface{}),
) error {
	b, err := url.Parse(base)
	if err != nil {
		return err
	}
	client := p.httpClient()
	for _, f := range files {
		fp, err := url.Parse(f)
		if err != nil {
			return err
		}
		u := b.ResolveReference(fp).String()
		if p.markChecked(u) {
			continue
		}
		p.checkTLS(u)
		res, err := client.Get(u)
		if err != nil {
			lg("Fetching %s failed: %v.", u, err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			lg("Fetching %s failed: Status code %d (%s)",
				u, res.StatusCode, res.Status)
			continue
		}
		data, err := func() ([]byte, error) {
			defer res.Body.Close()
			return io.ReadAll(res.Body)
		}()
		if err != nil {
			lg("Reading %s failed: %v", u, err)
			continue
		}
		var doc interface{}
		if err := json.Unmarshal(data, &doc); err != nil {
			lg("Failed to unmarshal %s: %v", u, err)
			continue
		}
		errors, err := csaf.ValidateCSAF(doc)
		if err != nil {
			lg("Failed to validate %s: %v", u, err)
			continue
		}
		if len(errors) > 0 {
			lg("CSAF file %s has %d validation errors.", u, len(errors))
		}

		// Check hashes
		for _, x := range []struct {
			ext  string
			hash func() hash.Hash
		}{
			{"sha256", sha256.New},
			{"sha512", sha512.New},
		} {
			hashFile := u + "." + x.ext
			p.checkTLS(hashFile)
			if res, err = client.Get(hashFile); err != nil {
				p.addBadHash("Fetching %s failed: %v.", hashFile, err)
				continue
			}
			if res.StatusCode != http.StatusOK {
				p.addBadHash("Fetching %s failed: Status code %d (%s)",
					hashFile, res.StatusCode, res.Status)
				continue
			}
			h, err := func() ([]byte, error) {
				defer res.Body.Close()
				return hashFromReader(res.Body)
			}()
			if err != nil {
				p.addBadHash("Reading %s failed: %v.", hashFile, err)
				continue
			}
			if len(h) == 0 {
				p.addBadHash("No hash found in %s.", hashFile)
				continue
			}
			orig := x.hash()
			if _, err := orig.Write(data); err != nil {
				p.addBadHash("%s hashing of %s failed: %v.",
					strings.ToUpper(x.ext), u, err)
				continue
			}
			if !bytes.Equal(h, orig.Sum(nil)) {
				p.addBadHash("%s hash of %s does not match %s.",
					strings.ToUpper(x.ext), u, hashFile)
			}
		}

		// Check signature
		sigFile := u + ".asc"
		p.checkTLS(sigFile)

		if res, err = client.Get(sigFile); err != nil {
			p.addBadSignature("Fetching %s failed: %v.", sigFile, err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			p.addBadSignature("Fetching %s failed: status code %d (%s)",
				sigFile, res.StatusCode, res.Status)
			continue
		}

		sig, err := func() (*crypto.PGPSignature, error) {
			defer res.Body.Close()
			all, err := io.ReadAll(res.Body)
			if err != nil {
				return nil, err
			}
			return crypto.NewPGPSignatureFromArmored(string(all))
		}()
		if err != nil {
			p.addBadSignature("Loading signature from %s failed: %v.",
				sigFile, err)
			continue
		}

		if len(p.keys) > 0 {
			pm := crypto.NewPlainMessage(data)
			t := crypto.GetUnixTime()
			var verified bool
			for _, key := range p.keys {
				if err := key.VerifyDetached(pm, sig, t); err == nil {
					verified = true
					break
				}
			}
			if !verified {
				p.addBadSignature("Signature of %s could not be verified.", u)
			}
		}

	}
	return nil
}

var hexRe = regexp.MustCompile(`^([[:xdigit:]]+)`)

func hashFromReader(r io.Reader) ([]byte, error) {
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		if m := hexRe.FindStringSubmatch(scanner.Text()); m != nil {
			return hex.DecodeString(m[1])
		}
	}
	return nil, scanner.Err()
}

func basePath(p string) (string, error) {
	u, err := url.Parse(p)
	if err != nil {
		return "", err
	}
	ep := u.EscapedPath()
	if idx := strings.LastIndexByte(ep, '/'); idx != -1 {
		ep = ep[:idx]
	}
	user := u.User.String()
	if user != "" {
		user += "@"
	}
	return u.Scheme + "://" + user + u.Host + "/" + ep, nil
}

func (p *processor) processFeed(feed string, lg func(string, ...interface{})) error {

	client := p.httpClient()
	res, err := client.Get(feed)
	if err != nil {
		lg("Cannot fetch feed %s: %v.", feed, err)
		return nil
	}
	if res.StatusCode != http.StatusOK {
		lg("Fetching %s failed. Status code %d (%s)",
			feed, res.StatusCode, res.Status)
		return nil
	}
	rfeed, err := func() (*csaf.ROLIEFeed, error) {
		defer res.Body.Close()
		return csaf.LoadROLIEFeed(res.Body)
	}()
	if err != nil {
		lg("Loading ROLIE feed failed: %v.", err)
		return nil
	}
	base, err := basePath(feed)
	if err != nil {
		return err
	}

	// Extract the CSAF files from feed.
	var files []string
	for _, f := range rfeed.Entry {
		for i := range f.Link {
			files = append(files, f.Link[i].HRef)
		}
	}
	return p.integrity(files, base, lg)
}

func (p *processor) processFeeds(
	domain string,
	feeds [][]csaf.Feed,
	lg func(string, ...interface{}),
) error {
	base, err := url.Parse("https://" + domain + "/.well-known/csaf/")
	if err != nil {
		return err
	}
	for i := range feeds {
		for j := range feeds[i] {
			feed := &feeds[i][j]
			if feed.URL == nil {
				continue
			}
			up, err := url.Parse(string(*feed.URL))
			if err != nil {
				lg("Invalid URL %s in feed: %v.", *feed.URL, err)
				continue
			}
			feedURL := base.ResolveReference(up).String()
			p.checkTLS(feedURL)
			if err := p.processFeed(feedURL, lg); err != nil {
				return err
			}
		}
	}
	return nil
}

func (p *processor) checkCSAFs(domain string, lg func(string, ...interface{})) error {
	// Check for ROLIE
	rolie, err := p.jsonPath("$.distributions[*].rolie.feeds")
	if err != nil {
		return err
	}

	fs, hasRolie := rolie.([]interface{})
	hasRolie = hasRolie && len(fs) > 0

	if hasRolie {
		var feeds [][]csaf.Feed
		if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
			lg("ROLIE feeds are not compatible: %v.", err)
			return nil
		}
		if err := p.processFeeds(domain, feeds, lg); err != nil {
			return err
		}
	} else {
		// No rolie feeds
		// TODO: Implement me!
	}

	return nil
}

type baseCheck struct {
	exec int
	num  int
@@ -732,14 +319,6 @@ func (sc *signaturesCheck) run(p *processor, _ string) error {
	return nil
}

func reserialize(dst, src interface{}) error {
	s, err := json.Marshal(src)
	if err != nil {
		return err
	}
	return json.Unmarshal(s, dst)
}

func (ppkc *publicPGPKeyCheck) run(p *processor, domain string) error {

	src, err := p.jsonPath("$.pgp_keys")
cmd/csaf_checker/processor.go (new file, 407 lines added)
@@ -0,0 +1,407 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>

package main

import (
	"bytes"
	"context"
	"crypto/sha256"
	"crypto/sha512"
	"crypto/tls"
	"encoding/json"
	"errors"
	"fmt"
	"hash"
	"io"
	"net/http"
	"net/url"
	"sort"
	"strings"

	"github.com/PaesslerAG/gval"
	"github.com/PaesslerAG/jsonpath"
	"github.com/ProtonMail/gopenpgp/v2/crypto"
	"github.com/csaf-poc/csaf_distribution/csaf"
	"github.com/csaf-poc/csaf_distribution/util"
)

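// processor holds the state collected while the checks run: the options,
// observed redirect chains, non-TLS URLs, already checked URLs, the loaded
// provider metadata, the JSONPath builder, the OpenPGP key rings and the
// recorded hash and signature problems.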
type processor struct {
	opts           *options
	redirects      map[string]string
	noneTLS        map[string]struct{}
	alreadyChecked map[string]struct{}
	pmd256         []byte
	pmd            interface{}
	builder        gval.Language
	keys           []*crypto.KeyRing
	badHashes      []string
	badSignatures  []string
}

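// check is a single test run against a domain: executionOrder controls when
// it runs, run performs the test and report writes its findings into the
// domain's part of the report.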
type check interface {
	executionOrder() int
	run(*processor, string) error
	report(*processor, *Domain)
}

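// newProcessor returns a processor initialized with the given options,
// empty lookup tables and a JSONPath evaluator.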
func newProcessor(opts *options) *processor {
	return &processor{
		opts:           opts,
		redirects:      map[string]string{},
		noneTLS:        map[string]struct{}{},
		alreadyChecked: map[string]struct{}{},
		builder:        gval.Full(jsonpath.Language()),
	}
}

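// clean resets the per-domain state so the processor can be reused for the
// next domain.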
func (p *processor) clean() {
	for k := range p.redirects {
		delete(p.redirects, k)
	}
	for k := range p.noneTLS {
		delete(p.noneTLS, k)
	}
	for k := range p.alreadyChecked {
		delete(p.alreadyChecked, k)
	}
	p.pmd256 = nil
	p.pmd = nil
	p.keys = nil
	p.badSignatures = nil
	p.badHashes = nil
}

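// run executes the given checks, sorted by their execution order, against
// each domain, collects their reports and returns the overall report.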
func (p *processor) run(checks []check, domains []string) (*Report, error) {

	var report Report

	execs := make([]check, len(checks))
	copy(execs, checks)
	sort.SliceStable(execs, func(i, j int) bool {
		return execs[i].executionOrder() < execs[j].executionOrder()
	})

	for _, d := range domains {
		for _, ch := range execs {
			if err := ch.run(p, d); err != nil {
				return nil, err
			}
		}
		domain := &Domain{Name: d}
		for _, ch := range checks {
			ch.report(p, domain)
		}
		report.Domains = append(report.Domains, domain)
		p.clean()
	}

	return &report, nil
}

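// jsonPath evaluates the JSONPath expression expr against the loaded
// provider metadata.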
func (p *processor) jsonPath(expr string) (interface{}, error) {
	if p.pmd == nil {
		return nil, errors.New("no provider metadata loaded")
	}
	eval, err := p.builder.NewEvaluable(expr)
	if err != nil {
		return nil, err
	}
	return eval(context.Background(), p.pmd)
}

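// checkTLS records u if it is not served over https.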
func (p *processor) checkTLS(u string) {
	if x, err := url.Parse(u); err == nil && x.Scheme != "https" {
		p.noneTLS[u] = struct{}{}
	}
}

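// markChecked reports whether s was already checked and remembers it if it
// was not.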
func (p *processor) markChecked(s string) bool {
	if _, ok := p.alreadyChecked[s]; ok {
		return true
	}
	p.alreadyChecked[s] = struct{}{}
	return false
}

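// checkRedirect records the chain of redirects leading to a request and
// refuses to follow more than ten redirections.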
func (p *processor) checkRedirect(r *http.Request, via []*http.Request) error {

	var path strings.Builder
	for i, v := range via {
		if i > 0 {
			path.WriteString(", ")
		}
		path.WriteString(v.URL.String())
	}
	url := r.URL.String()
	p.checkTLS(url)
	p.redirects[url] = path.String()

	if len(via) > 10 {
		return errors.New("Too many redirections")
	}
	return nil
}

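// httpClient returns an HTTP client that tracks redirects and, if the
// insecure option is set, skips TLS certificate verification.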
func (p *processor) httpClient() *http.Client {
	client := http.Client{
		CheckRedirect: p.checkRedirect,
	}

	if p.opts.Insecure {
		client.Transport = &http.Transport{
			TLSClientConfig: &tls.Config{
				InsecureSkipVerify: true,
			},
		}
	}

	return &client
}

func (p *processor) addBadHash(format string, args ...interface{}) {
	p.badHashes = append(p.badHashes, fmt.Sprintf(format, args...))
}

func (p *processor) addBadSignature(format string, args ...interface{}) {
	p.badSignatures = append(p.badSignatures, fmt.Sprintf(format, args...))
}

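// integrity fetches the given advisory files relative to base, validates
// them against the CSAF schema and checks the accompanying SHA-256/SHA-512
// hash files and OpenPGP signatures.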
func (p *processor) integrity(
	files []string,
	base string,
	lg func(string, ...interface{}),
) error {
	b, err := url.Parse(base)
	if err != nil {
		return err
	}
	client := p.httpClient()
	for _, f := range files {
		fp, err := url.Parse(f)
		if err != nil {
			return err
		}
		u := b.ResolveReference(fp).String()
		if p.markChecked(u) {
			continue
		}
		p.checkTLS(u)
		res, err := client.Get(u)
		if err != nil {
			lg("Fetching %s failed: %v.", u, err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			lg("Fetching %s failed: Status code %d (%s)",
				u, res.StatusCode, res.Status)
			continue
		}
		data, err := func() ([]byte, error) {
			defer res.Body.Close()
			return io.ReadAll(res.Body)
		}()
		if err != nil {
			lg("Reading %s failed: %v", u, err)
			continue
		}
		var doc interface{}
		if err := json.Unmarshal(data, &doc); err != nil {
			lg("Failed to unmarshal %s: %v", u, err)
			continue
		}
		errors, err := csaf.ValidateCSAF(doc)
		if err != nil {
			lg("Failed to validate %s: %v", u, err)
			continue
		}
		if len(errors) > 0 {
			lg("CSAF file %s has %d validation errors.", u, len(errors))
		}

		// Check hashes
		for _, x := range []struct {
			ext  string
			hash func() hash.Hash
		}{
			{"sha256", sha256.New},
			{"sha512", sha512.New},
		} {
			hashFile := u + "." + x.ext
			p.checkTLS(hashFile)
			if res, err = client.Get(hashFile); err != nil {
				p.addBadHash("Fetching %s failed: %v.", hashFile, err)
				continue
			}
			if res.StatusCode != http.StatusOK {
				p.addBadHash("Fetching %s failed: Status code %d (%s)",
					hashFile, res.StatusCode, res.Status)
				continue
			}
			h, err := func() ([]byte, error) {
				defer res.Body.Close()
				return hashFromReader(res.Body)
			}()
			if err != nil {
				p.addBadHash("Reading %s failed: %v.", hashFile, err)
				continue
			}
			if len(h) == 0 {
				p.addBadHash("No hash found in %s.", hashFile)
				continue
			}
			orig := x.hash()
			if _, err := orig.Write(data); err != nil {
				p.addBadHash("%s hashing of %s failed: %v.",
					strings.ToUpper(x.ext), u, err)
				continue
			}
			if !bytes.Equal(h, orig.Sum(nil)) {
				p.addBadHash("%s hash of %s does not match %s.",
					strings.ToUpper(x.ext), u, hashFile)
			}
		}

		// Check signature
		sigFile := u + ".asc"
		p.checkTLS(sigFile)

		if res, err = client.Get(sigFile); err != nil {
			p.addBadSignature("Fetching %s failed: %v.", sigFile, err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			p.addBadSignature("Fetching %s failed: status code %d (%s)",
				sigFile, res.StatusCode, res.Status)
			continue
		}

		sig, err := func() (*crypto.PGPSignature, error) {
			defer res.Body.Close()
			all, err := io.ReadAll(res.Body)
			if err != nil {
				return nil, err
			}
			return crypto.NewPGPSignatureFromArmored(string(all))
		}()
		if err != nil {
			p.addBadSignature("Loading signature from %s failed: %v.",
				sigFile, err)
			continue
		}

		if len(p.keys) > 0 {
			pm := crypto.NewPlainMessage(data)
			t := crypto.GetUnixTime()
			var verified bool
			for _, key := range p.keys {
				if err := key.VerifyDetached(pm, sig, t); err == nil {
					verified = true
					break
				}
			}
			if !verified {
				p.addBadSignature("Signature of %s could not be verified.", u)
			}
		}
	}
	return nil
}

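// processFeed downloads a ROLIE feed and runs the integrity checks on the
// CSAF files it references.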
func (p *processor) processFeed(feed string, lg func(string, ...interface{})) error {

	client := p.httpClient()
	res, err := client.Get(feed)
	if err != nil {
		lg("Cannot fetch feed %s: %v.", feed, err)
		return nil
	}
	if res.StatusCode != http.StatusOK {
		lg("Fetching %s failed. Status code %d (%s)",
			feed, res.StatusCode, res.Status)
		return nil
	}
	rfeed, err := func() (*csaf.ROLIEFeed, error) {
		defer res.Body.Close()
		return csaf.LoadROLIEFeed(res.Body)
	}()
	if err != nil {
		lg("Loading ROLIE feed failed: %v.", err)
		return nil
	}
	base, err := basePath(feed)
	if err != nil {
		return err
	}

	// Extract the CSAF files from feed.
	var files []string
	for _, f := range rfeed.Entry {
		for i := range f.Link {
			files = append(files, f.Link[i].HRef)
		}
	}
	return p.integrity(files, base, lg)
}

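// processFeeds resolves the URLs of the given feeds against the domain's
// .well-known/csaf/ base and processes each of them.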
func (p *processor) processFeeds(
	domain string,
	feeds [][]csaf.Feed,
	lg func(string, ...interface{}),
) error {
	base, err := url.Parse("https://" + domain + "/.well-known/csaf/")
	if err != nil {
		return err
	}
	for i := range feeds {
		for j := range feeds[i] {
			feed := &feeds[i][j]
			if feed.URL == nil {
				continue
			}
			up, err := url.Parse(string(*feed.URL))
			if err != nil {
				lg("Invalid URL %s in feed: %v.", *feed.URL, err)
				continue
			}
			feedURL := base.ResolveReference(up).String()
			p.checkTLS(feedURL)
			if err := p.processFeed(feedURL, lg); err != nil {
				return err
			}
		}
	}
	return nil
}

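// checkCSAFs looks for ROLIE feeds in the provider metadata and, if present,
// processes them; directory-based distributions are not handled yet.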
func (p *processor) checkCSAFs(domain string, lg func(string, ...interface{})) error {
	// Check for ROLIE
	rolie, err := p.jsonPath("$.distributions[*].rolie.feeds")
	if err != nil {
		return err
	}

	fs, hasRolie := rolie.([]interface{})
	hasRolie = hasRolie && len(fs) > 0

	if hasRolie {
		var feeds [][]csaf.Feed
		if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
			lg("ROLIE feeds are not compatible: %v.", err)
			return nil
		}
		if err := p.processFeeds(domain, feeds, lg); err != nil {
			return err
		}
	} else {
		// No rolie feeds
		// TODO: Implement me!
	}

	return nil
}
cmd/csaf_checker/util.go (new file, 46 lines added)
@@ -0,0 +1,46 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>

package main

import (
	"bufio"
	"encoding/hex"
	"io"
	"net/url"
	"regexp"
	"strings"
)

var hexRe = regexp.MustCompile(`^([[:xdigit:]]+)`)

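// hashFromReader reads the first hexadecimal token from r, e.g. the digest
// at the start of a .sha256 or .sha512 file, and returns its decoded bytes.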
func hashFromReader(r io.Reader) ([]byte, error) {
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		if m := hexRe.FindStringSubmatch(scanner.Text()); m != nil {
			return hex.DecodeString(m[1])
		}
	}
	return nil, scanner.Err()
}

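// basePath strips the last component from the URL's path and returns scheme,
// user info, host and the remaining path.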
func basePath(p string) (string, error) {
	u, err := url.Parse(p)
	if err != nil {
		return "", err
	}
	ep := u.EscapedPath()
	if idx := strings.LastIndexByte(ep, '/'); idx != -1 {
		ep = ep[:idx]
	}
	user := u.User.String()
	if user != "" {
		user += "@"
	}
	return u.Scheme + "://" + user + u.Host + "/" + ep, nil
}
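
For illustration only (not part of this commit): hashFromReader expects input in the format written by sha256sum or sha512sum, a hex digest optionally followed by a file name, and returns the decoded digest bytes. A minimal sketch, assuming it lives in a file of the same package that imports fmt and strings; the digest value is just an example:

// exampleHashFromReader is a hypothetical helper showing the expected
// input format of hashFromReader; it is not part of the commit.
func exampleHashFromReader() {
	line := "b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c  example.json\n"
	digest, err := hashFromReader(strings.NewReader(line))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%x\n", digest) // prints the 32 decoded bytes back as hex
}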