1
0
Fork 0
mirror of https://github.com/gocsaf/csaf.git synced 2025-12-22 11:55:40 +01:00

Add aggregator; improve itest workflow

* Factor JSON evaluation and construction of base URLs out of checker.
* Move json path matching to util.
* Add csaf_aggregator (as additional command)
* Improve itest workflow to check out the branch it is running on.

resolve #105
resolve #72

Co-authored-by: tschmidtb51 <65305130+tschmidtb51@users.noreply.github.com>
Co-authored-by: Bernhard Reiter <bernhard@intevation.de>
Co-authored-by: Fadi Abbud <fadi.abbud@intevation.de>
This commit is contained in:
Sascha L. Teichmann 2022-05-10 18:12:38 +02:00 committed by GitHub
parent 9da0589236
commit 8a1ebe0b7a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 2789 additions and 88 deletions

View file

@ -11,11 +11,14 @@ jobs:
with:
go-version: 1.17
- name: Checkout
uses: actions/checkout@v3
- name: Execute the scripts
run: |
sudo apt install -y make nginx fcgiwrap gnutls-bin
cp -r $GITHUB_WORKSPACE ~
cd ~
git clone https://github.com/csaf-poc/csaf_distribution.git
cd csaf_distribution/docs/scripts/
env FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" ./TLSConfigsForITest.sh
env FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" ./TLSClientConfigsForITest.sh

View file

@ -18,3 +18,4 @@
| golang.org/x/sys | BSD-3-Clause |
| golang.org/x/term | BSD-3-Clause |
| golang.org/x/text | BSD-3-Clause |
| github.com/gofrs/flock | BSD-3-Clause |

View file

@ -0,0 +1,72 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"context"
"errors"
"io"
"net/http"
"net/url"
"golang.org/x/time/rate"
)
// client abstracts the subset of http.Client methods used by the
// aggregator so that rate limiting can be layered on top of a real
// HTTP client (see limitingClient).
type client interface {
Do(req *http.Request) (*http.Response, error)
Get(url string) (*http.Response, error)
Head(url string) (*http.Response, error)
Post(url, contentType string, body io.Reader) (*http.Response, error)
PostForm(url string, data url.Values) (*http.Response, error)
}
// limitingClient wraps a client and delays each request through a
// token-bucket rate limiter.
type limitingClient struct {
client
limiter *rate.Limiter
}
// Do waits for the rate limiter before delegating to the wrapped client.
func (lc *limitingClient) Do(req *http.Request) (*http.Response, error) {
	// Propagate limiter errors instead of silently ignoring them.
	if err := lc.limiter.Wait(context.Background()); err != nil {
		return nil, err
	}
	return lc.client.Do(req)
}
// Get waits for the rate limiter before delegating to the wrapped client.
func (lc *limitingClient) Get(url string) (*http.Response, error) {
	// Propagate limiter errors instead of silently ignoring them.
	if err := lc.limiter.Wait(context.Background()); err != nil {
		return nil, err
	}
	return lc.client.Get(url)
}
// Head waits for the rate limiter before delegating to the wrapped client.
func (lc *limitingClient) Head(url string) (*http.Response, error) {
	// Propagate limiter errors instead of silently ignoring them.
	if err := lc.limiter.Wait(context.Background()); err != nil {
		return nil, err
	}
	return lc.client.Head(url)
}
// Post waits for the rate limiter before delegating to the wrapped client.
func (lc *limitingClient) Post(url, contentType string, body io.Reader) (*http.Response, error) {
	// Propagate limiter errors instead of silently ignoring them.
	if err := lc.limiter.Wait(context.Background()); err != nil {
		return nil, err
	}
	return lc.client.Post(url, contentType, body)
}
// PostForm waits for the rate limiter before delegating to the wrapped client.
func (lc *limitingClient) PostForm(url string, data url.Values) (*http.Response, error) {
	// Propagate limiter errors instead of silently ignoring them.
	if err := lc.limiter.Wait(context.Background()); err != nil {
		return nil, err
	}
	return lc.client.PostForm(url, data)
}
// errNotFound is returned by downloadJSON if the document could not
// be fetched as JSON.
var errNotFound = errors.New("not found")

// downloadJSON fetches url with c. If the answer is a 200 with
// Content-Type "application/json" the body is passed to found;
// every other outcome is reported as errNotFound (this is an
// expected condition while probing well-known locations).
func downloadJSON(c client, url string, found func(io.Reader) error) error {
	res, err := c.Get(url)
	if err != nil {
		// ignore this as it is expected.
		return errNotFound
	}
	// Fixed: the original leaked res.Body when the request succeeded
	// but status or content type did not match.
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK ||
		res.Header.Get("Content-Type") != "application/json" {
		// ignore this as it is expected.
		return errNotFound
	}
	return found(res.Body)
}

View file

@ -0,0 +1,217 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"crypto/tls"
"errors"
"fmt"
"net/http"
"os"
"runtime"
"strings"
"sync"
"github.com/BurntSushi/toml"
"github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/csaf-poc/csaf_distribution/csaf"
"golang.org/x/time/rate"
)
// Defaults used when the corresponding configuration options are unset.
const (
defaultConfigPath = "aggregator.toml"
defaultWorkers = 10
defaultFolder = "/var/www"
defaultWeb = "/var/www/html"
defaultDomain = "https://example.com"
defaultOpenPGPURL = "https://openpgp.circl.lu/pks/lookup?op=get&search=${FINGERPRINT}" // Default OpenPGP URL.
)
// provider is a single CSAF provider entry from the configuration file.
type provider struct {
// Name is the unique name of the provider (used as folder name).
Name string `toml:"name"`
// Domain is used to locate the provider-metadata.json of the provider.
Domain string `toml:"domain"`
// Rate gives the provider specific rate limiting (see overall Rate).
Rate *float64 `toml:"rate"`
// Insecure skips TLS certificate verification for this provider.
Insecure *bool `toml:"insecure"`
}
// config is the top-level configuration of the csaf_aggregator.
type config struct {
// Workers is the number of concurrently executed workers for downloading.
Workers int `toml:"workers"`
// Folder is the directory where transaction working copies are created.
Folder string `toml:"folder"`
// Web is the web root under which .well-known/csaf-aggregator is written.
Web string `toml:"web"`
// Domain is used to construct the canonical URLs of the produced documents.
Domain string `toml:"domain"`
// Rate gives the average upper limit of https operations per second.
Rate *float64 `toml:"rate"`
// Insecure skips TLS certificate verification globally.
Insecure *bool `toml:"insecure"`
// Aggregator is the aggregator info written into aggregator.json.
Aggregator csaf.AggregatorInfo `toml:"aggregator"`
// Providers lists the providers to aggregate.
Providers []*provider `toml:"providers"`
// Key is the file name of the armored OpenPGP key (see cryptoKey).
Key string `toml:"key"`
// OpenPGPURL is a URL template; ${FINGERPRINT} and ${KEY_ID} are expanded.
OpenPGPURL string `toml:"openpgp_url"`
// Passphrase is an optional passphrase for the key.
// NOTE(review): not used in this file - confirm usage elsewhere.
Passphrase *string `toml:"passphrase"`
// AllowSingleProvider permits configurations with a single provider.
AllowSingleProvider bool `toml:"allow_single_provider"`
// LockFile tries to lock to a given file.
LockFile *string `toml:"lock_file"`
// Interim performs an interim scan.
Interim bool `toml:"interim"`
// InterimYears is numbers numbers of years to look back
// for interim advisories. Less/equal zero means forever.
InterimYears int `toml:"interim_years"`
// keyMu guards the lazily loaded key cache below.
keyMu sync.Mutex
key *crypto.Key
keyErr error
}
// runAsMirror determines if the aggregator should run in mirror mode.
func (c *config) runAsMirror() bool {
	cat := c.Aggregator.Category
	return cat != nil && *cat == csaf.AggregatorAggregator
}
// GetOpenPGPURL expands the placeholders ${FINGERPRINT} and ${KEY_ID}
// in the configured OpenPGP URL with the values of the given key.
// A nil key returns the URL template unmodified.
func (c *config) GetOpenPGPURL(key *crypto.Key) string {
	if key == nil {
		return c.OpenPGPURL
	}
	replacements := []string{
		"${FINGERPRINT}", "0x" + key.GetFingerprint(),
		"${KEY_ID}", "0x" + key.GetHexKeyID(),
	}
	return strings.NewReplacer(replacements...).Replace(c.OpenPGPURL)
}
// cryptoKey loads the armored OpenPGP key from the file configured
// in c.Key. The result (or the error) is cached under keyMu, so the
// file is read at most once. Returns (nil, nil) if no key is configured.
func (c *config) cryptoKey() (*crypto.Key, error) {
if c.Key == "" {
return nil, nil
}
c.keyMu.Lock()
defer c.keyMu.Unlock()
// Serve from cache if we already tried to load the key.
if c.key != nil || c.keyErr != nil {
return c.key, c.keyErr
}
var f *os.File
if f, c.keyErr = os.Open(c.Key); c.keyErr != nil {
return nil, c.keyErr
}
defer f.Close()
c.key, c.keyErr = crypto.NewKeyFromArmoredReader(f)
return c.key, c.keyErr
}
// httpClient constructs the HTTP client used to talk to provider p,
// honoring the insecure TLS flags and the rate limits of both the
// provider and the global configuration (provider settings win).
func (c *config) httpClient(p *provider) client {
	// Fixed: the local was named 'client', shadowing the package-level
	// 'client' interface type.
	hc := http.Client{}
	if p.Insecure != nil && *p.Insecure || c.Insecure != nil && *c.Insecure {
		hc.Transport = &http.Transport{
			TLSClientConfig: &tls.Config{
				InsecureSkipVerify: true,
			},
		}
	}
	if p.Rate == nil && c.Rate == nil {
		return &hc
	}
	var r float64
	if c.Rate != nil {
		r = *c.Rate
	}
	// A provider specific rate overrides the global one.
	if p.Rate != nil {
		r = *p.Rate
	}
	return &limitingClient{
		client:  &hc,
		limiter: rate.NewLimiter(rate.Limit(r), 1),
	}
}
// checkProviders validates the provider list: unless a single
// provider is explicitly allowed at least two are required, and
// every provider needs a name and a domain, with names being unique.
func (c *config) checkProviders() error {
	if !c.AllowSingleProvider && len(c.Providers) < 2 {
		return errors.New("need at least two providers")
	}
	seen := make(map[string]bool, len(c.Providers))
	for _, prov := range c.Providers {
		switch {
		case prov.Name == "":
			return errors.New("no name given for provider")
		case prov.Domain == "":
			return errors.New("no domain given for provider")
		case seen[prov.Name]:
			return fmt.Errorf("provider '%s' is configured more than once", prov.Name)
		}
		seen[prov.Name] = true
	}
	return nil
}
// setDefaults fills in default values for unset configuration options.
func (c *config) setDefaults() {
	if c.Folder == "" {
		c.Folder = defaultFolder
	}
	if c.Web == "" {
		c.Web = defaultWeb
	}
	if c.Domain == "" {
		c.Domain = defaultDomain
	}
	if c.OpenPGPURL == "" {
		c.OpenPGPURL = defaultOpenPGPURL
	}
	if c.Workers <= 0 {
		// Use the number of CPUs, capped at defaultWorkers.
		c.Workers = runtime.NumCPU()
		if c.Workers > defaultWorkers {
			c.Workers = defaultWorkers
		}
	}
	// More workers than providers are useless.
	if n := len(c.Providers); c.Workers > n {
		c.Workers = n
	}
}
// check validates the loaded configuration as a whole.
func (c *config) check() error {
if len(c.Providers) == 0 {
return errors.New("no providers given in configuration")
}
// The aggregator info must be valid on its own.
if err := c.Aggregator.Validate(); err != nil {
return err
}
return c.checkProviders()
}
// loadConfig reads the TOML configuration from path (or the default
// path if empty), applies the defaults and validates it.
func loadConfig(path string) (*config, error) {
	if path == "" {
		path = defaultConfigPath
	}
	cfg := new(config)
	if _, err := toml.DecodeFile(path, cfg); err != nil {
		return nil, err
	}
	cfg.setDefaults()
	if err := cfg.check(); err != nil {
		return nil, err
	}
	return cfg, nil
}

View file

@ -0,0 +1,7 @@
// csaf_aggregator is an implementation of the role CSAF Aggregator of the
// CSAF 2.0 specification
// (https://docs.oasis-open.org/csaf/csaf/v2.0/csd02/csaf-v2.0-csd02.html)
//
// TODO: To be called periodically, e.g. with cron
package main

View file

@ -0,0 +1,38 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"fmt"
"os"
)
// writeHash writes a hash to the file fname in the common
// "<hex hash> <name>" checksum format.
func writeHash(fname, name string, hash []byte) error {
	f, err := os.Create(fname)
	if err != nil {
		return err
	}
	// Fixed: report write errors too - the original ignored them.
	if _, err := fmt.Fprintf(f, "%x %s\n", hash, name); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}
// writeFileHashes stores data in fname and writes the corresponding
// SHA256 and SHA512 checksum files next to it.
func writeFileHashes(fname, name string, data, s256, s512 []byte) error {
	// Write the file itself.
	if err := os.WriteFile(fname, data, 0644); err != nil {
		return err
	}
	// Write the checksum companions.
	for _, h := range []struct {
		ext string
		sum []byte
	}{
		{".sha256", s256},
		{".sha512", s512},
	} {
		if err := writeHash(fname+h.ext, name, h.sum); err != nil {
			return err
		}
	}
	return nil
}

169
cmd/csaf_aggregator/full.go Normal file
View file

@ -0,0 +1,169 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"errors"
"fmt"
"log"
"os"
"path/filepath"
"sync"
"time"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
)
// fullJob is the work unit of a full run: the provider to process,
// the resulting aggregator entry and the error, if any.
type fullJob struct {
provider *provider
aggregatorProvider *csaf.AggregatorCSAFProvider
err error
}
// setupProviderFull fetches the provider-metadata.json for a specific
// provider and validates it. On success w.metadataProvider and w.loc
// are set up for the following mirror/list step.
func (w *worker) setupProviderFull(provider *provider) error {
log.Printf("worker #%d: %s (%s)\n",
w.num, provider.Name, provider.Domain)
w.dir = ""
w.provider = provider
// Each job needs a separate client.
w.client = w.cfg.httpClient(provider)
// We need the provider metadata in all cases.
if err := w.locateProviderMetadata(provider.Domain); err != nil {
return err
}
// Validate the provider metadata.
// NB: the local 'errors' shadows the errors package in this scope.
errors, err := csaf.ValidateProviderMetadata(w.metadataProvider)
if err != nil {
return err
}
if len(errors) > 0 {
return fmt.Errorf(
"provider-metadata.json has %d validation issues", len(errors))
}
log.Printf("provider-metadata: %s\n", w.loc)
return nil
}
// fullWorkFunc implements the actual work (mirror/list).
type fullWorkFunc func(*worker) (*csaf.AggregatorCSAFProvider, error)

// fullWork handles the treatment of providers concurrently.
// It consumes jobs from the channel until it is closed, storing the
// per-job result or error in the job itself.
func (w *worker) fullWork(
wg *sync.WaitGroup,
doWork fullWorkFunc,
jobs <-chan *fullJob,
) {
defer wg.Done()
for j := range jobs {
// A setup failure only fails this job, not the whole worker.
if err := w.setupProviderFull(j.provider); err != nil {
j.err = err
continue
}
j.aggregatorProvider, j.err = doWork(w)
}
}
// full performs the complete lister/download run over all configured
// providers with a pool of workers and finally writes the assembled
// aggregator.json document below the web root.
func (p *processor) full() error {
var doWork fullWorkFunc
if p.cfg.runAsMirror() {
doWork = (*worker).mirror
log.Println("Running in aggregator mode")
} else {
doWork = (*worker).lister
log.Println("Running in lister mode")
}
queue := make(chan *fullJob)
var wg sync.WaitGroup
log.Printf("Starting %d workers.\n", p.cfg.Workers)
for i := 1; i <= p.cfg.Workers; i++ {
wg.Add(1)
w := newWorker(i, p.cfg)
go w.fullWork(&wg, doWork, queue)
}
// Feed the jobs to the workers.
// NB: the loop variable p shadows the receiver here.
jobs := make([]fullJob, len(p.cfg.Providers))
for i, p := range p.cfg.Providers {
jobs[i] = fullJob{provider: p}
queue <- &jobs[i]
}
close(queue)
wg.Wait()
// Assemble aggregator data structure.
csafProviders := make([]*csaf.AggregatorCSAFProvider, 0, len(jobs))
for i := range jobs {
j := &jobs[i]
if j.err != nil {
log.Printf("error: '%s' failed: %v\n", j.provider.Name, j.err)
continue
}
if j.aggregatorProvider == nil {
log.Printf(
"error: '%s' does not produce any result.\n", j.provider.Name)
continue
}
csafProviders = append(csafProviders, j.aggregatorProvider)
}
if len(csafProviders) == 0 {
return errors.New("all jobs failed, stopping")
}
version := csaf.AggregatorVersion20
canonicalURL := csaf.AggregatorURL(
p.cfg.Domain + "/.well-known/csaf-aggregator/aggregator.json")
lastUpdated := csaf.TimeStamp(time.Now())
agg := csaf.Aggregator{
Aggregator: &p.cfg.Aggregator,
Version: &version,
CanonicalURL: &canonicalURL,
CSAFProviders: csafProviders,
LastUpdated: &lastUpdated,
}
web := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator")
dstName := filepath.Join(web, "aggregator.json")
// Write to a unique temporary file first and rename it into place,
// so readers never see a partially written aggregator.json.
fname, file, err := util.MakeUniqFile(dstName + ".tmp")
if err != nil {
return err
}
if _, err := agg.WriteTo(file); err != nil {
file.Close()
os.RemoveAll(fname)
return err
}
if err := file.Close(); err != nil {
return err
}
return os.Rename(fname, dstName)
}

View file

@ -0,0 +1,250 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"bufio"
"encoding/csv"
"fmt"
"log"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"time"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
)
// writeInterims writes "interim.csv" for the given TLP label, listing
// all interim advisories youngest first. Each record consists of the
// current release date, the year-prefixed file name and the source URL.
// Nothing is written if there are no interim advisories.
func (w *worker) writeInterims(label string, summaries []summary) error {
	// Filter out the interims.
	var interims []summary
	for _, s := range summaries {
		if s.summary.Status == "interim" {
			interims = append(interims, s)
		}
	}
	if len(interims) == 0 {
		return nil
	}
	// Youngest first.
	sort.SliceStable(interims, func(i, j int) bool {
		return interims[i].summary.CurrentReleaseDate.After(
			interims[j].summary.CurrentReleaseDate)
	})
	f, err := os.Create(filepath.Join(w.dir, label, "interim.csv"))
	if err != nil {
		return err
	}
	out := csv.NewWriter(f)
	for i := range interims {
		s := &interims[i]
		record := []string{
			s.summary.CurrentReleaseDate.Format(time.RFC3339),
			strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + s.filename,
			s.url,
		}
		if err := out.Write(record); err != nil {
			f.Close()
			return err
		}
	}
	out.Flush()
	werr := out.Error()
	cerr := f.Close()
	if werr != nil {
		return werr
	}
	return cerr
}
// writeCSV writes "changes.csv" for the given label: all advisories
// sorted youngest first, each record holding the current release date
// and the year-prefixed file name.
func (w *worker) writeCSV(label string, summaries []summary) error {
	// Work on a copy; do not sort the caller's slice.
	sorted := make([]summary, len(summaries))
	copy(sorted, summaries)
	sort.SliceStable(sorted, func(i, j int) bool {
		return sorted[i].summary.CurrentReleaseDate.After(
			sorted[j].summary.CurrentReleaseDate)
	})
	f, err := os.Create(filepath.Join(w.dir, label, "changes.csv"))
	if err != nil {
		return err
	}
	out := csv.NewWriter(f)
	for i := range sorted {
		s := &sorted[i]
		record := []string{
			s.summary.CurrentReleaseDate.Format(time.RFC3339),
			strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + s.filename,
		}
		if err := out.Write(record); err != nil {
			f.Close()
			return err
		}
	}
	out.Flush()
	werr := out.Error()
	cerr := f.Close()
	if werr != nil {
		return werr
	}
	return cerr
}
// writeIndex writes "index.txt" for the given label: one
// year-prefixed advisory file name per line.
func (w *worker) writeIndex(label string, summaries []summary) error {
	f, err := os.Create(filepath.Join(w.dir, label, "index.txt"))
	if err != nil {
		return err
	}
	out := bufio.NewWriter(f)
	for i := range summaries {
		s := &summaries[i]
		fmt.Fprintf(out, "%d/%s\n",
			s.summary.InitialReleaseDate.Year(), s.filename)
	}
	ferr := out.Flush()
	cerr := f.Close()
	if ferr != nil {
		return ferr
	}
	return cerr
}
// writeROLIE writes the ROLIE feed document
// "csaf-feed-tlp-<label>.json" for the given TLP label from the
// collected summaries.
func (w *worker) writeROLIE(label string, summaries []summary) error {
	fname := "csaf-feed-tlp-" + strings.ToLower(label) + ".json"
	feedURL := w.cfg.Domain + "/.well-known/csaf-aggregator/" +
		w.provider.Name + "/" + fname
	entries := make([]*csaf.Entry, len(summaries))
	format := csaf.Format{
		Schema:  "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json",
		Version: "2.0",
	}
	for i := range summaries {
		s := &summaries[i]
		csafURL := w.cfg.Domain + "/.well-known/csaf-aggregator/" +
			w.provider.Name + "/" + label + "/" +
			strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" +
			s.filename
		entries[i] = &csaf.Entry{
			ID:        s.summary.ID,
			Titel:     s.summary.Title,
			Published: csaf.TimeStamp(s.summary.InitialReleaseDate),
			Updated:   csaf.TimeStamp(s.summary.CurrentReleaseDate),
			Link: []csaf.Link{{
				Rel:  "self",
				HRef: csafURL,
			}},
			Format: format,
			Content: csaf.Content{
				Type: "application/json",
				Src:  csafURL,
			},
		}
		if s.summary.Summary != "" {
			entries[i].Summary = &csaf.Summary{
				Content: s.summary.Summary,
			}
		}
	}
	rolie := &csaf.ROLIEFeed{
		Feed: csaf.FeedData{
			ID:    "csaf-feed-tlp-" + strings.ToLower(label),
			Title: "CSAF feed (TLP:" + strings.ToUpper(label) + ")",
			Link: []csaf.Link{{
				// Fixed: a ROLIE feed's link to itself uses the
				// relation "self" (RFC 8322), as the entries above
				// already do; the original wrote the literal "rel".
				Rel:  "self",
				HRef: feedURL,
			}},
			Updated: csaf.TimeStamp(time.Now()),
			Entry:   entries,
		},
	}
	// Sort by descending updated order.
	rolie.SortEntriesByUpdated()
	return util.WriteToFile(filepath.Join(w.dir, fname), rolie)
}
// writeIndices writes interim.csv, changes.csv, index.txt and the
// ROLIE feed for every TLP label that has collected summaries.
// It is a no-op if nothing was collected or no directory is set.
func (w *worker) writeIndices() error {
	if len(w.summaries) == 0 || w.dir == "" {
		return nil
	}
	writers := []func(string, []summary) error{
		w.writeInterims,
		w.writeCSV,
		w.writeIndex,
		w.writeROLIE,
	}
	for label, summaries := range w.summaries {
		log.Printf("%s: %d\n", label, len(summaries))
		for _, write := range writers {
			if err := write(label, summaries); err != nil {
				return err
			}
		}
	}
	return nil
}
// loadIndex loads baseURL/index.txt and returns a list of files
// prefixed by baseURL/.
func (w *worker) loadIndex(baseURL string) ([]string, error) {
	indexURL := baseURL + "/index.txt"
	resp, err := w.client.Get(indexURL)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// Fixed: a non-200 answer (e.g. an HTML error page) was
	// previously scanned as if it were the index - fail instead.
	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("fetching %s failed: status code %d (%s)",
			indexURL, resp.StatusCode, resp.Status)
	}
	var lines []string
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		lines = append(lines, baseURL+"/"+scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	return lines, nil
}

View file

@ -0,0 +1,380 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"bytes"
"crypto/sha256"
"crypto/sha512"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"time"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
)
// interimJob is the work unit of an interim run: the provider to
// check and the error, if any.
type interimJob struct {
provider *provider
err error
}
// checkInterims re-downloads the interim advisories given in interims
// (pairs of local file name and remote URL) for one TLP label and,
// if the remote content changed, overwrites the local copies plus
// their hash and signature files inside the transaction. It returns
// the advisories which are no longer interim.
// NOTE(review): nothing ever appends to 'finalized' in this version,
// so the returned list is always empty - confirm whether the status
// transition check was still to be implemented.
func (w *worker) checkInterims(
tx *lazyTransaction,
label string,
interims [][2]string,
) ([]string, error) {
var data bytes.Buffer
labelPath := filepath.Join(tx.Src(), label)
// advisories which are not interim any longer.
var finalized []string
for _, interim := range interims {
local := filepath.Join(labelPath, interim[0])
url := interim[1]
// Load local SHA256 of the advisory
localHash, err := util.HashFromFile(local + ".sha256")
if err != nil {
// NOTE(review): the error is swallowed and the whole label is
// skipped with (nil, nil) - confirm this should not be a
// 'continue' or a real error.
return nil, nil
}
res, err := w.client.Get(url)
if err != nil {
return nil, err
}
if res.StatusCode != http.StatusOK {
// NOTE(review): res.Body is not closed on this path.
return nil, fmt.Errorf("fetching %s failed: Status code %d (%s)",
url, res.StatusCode, res.Status)
}
// Hash the remote document while keeping a copy of its bytes.
s256 := sha256.New()
data.Reset()
hasher := io.MultiWriter(s256, &data)
var doc interface{}
if err := func() error {
defer res.Body.Close()
tee := io.TeeReader(res.Body, hasher)
return json.NewDecoder(tee).Decode(&doc)
}(); err != nil {
return nil, err
}
remoteHash := s256.Sum(nil)
// If the hashes are equal then we can ignore this advisory.
if bytes.Equal(localHash, remoteHash) {
continue
}
errors, err := csaf.ValidateCSAF(doc)
if err != nil {
return nil, fmt.Errorf("failed to validate %s: %v", url, err)
}
// XXX: Should we return an error here?
for _, e := range errors {
log.Printf("validation error: %s: %v\n", url, e)
}
// We need to write the changed content.
// This will start the transaction if not already started.
dst, err := tx.Dst()
if err != nil {
return nil, err
}
// Overwrite in the cloned folder.
// NB: 'bytes' shadows the bytes package below.
nlocal := filepath.Join(dst, label, interim[0])
bytes := data.Bytes()
if err := os.WriteFile(nlocal, bytes, 0644); err != nil {
return nil, err
}
name := filepath.Base(nlocal)
if err := util.WriteHashToFile(
nlocal+".sha512", name, sha512.New(), bytes,
); err != nil {
return nil, err
}
if err := util.WriteHashSumToFile(
nlocal+".sha256", name, remoteHash,
); err != nil {
return nil, err
}
// Download the signature
sigURL := url + ".asc"
ascFile := nlocal + ".asc"
// Download the signature or sign it our self.
if err := w.downloadSignatureOrSign(sigURL, ascFile, bytes); err != nil {
return nil, err
}
}
return finalized, nil
}
// setupProviderInterim prepares the worker for a specific provider.
func (w *worker) setupProviderInterim(p *provider) {
	log.Printf("worker #%d: %s (%s)\n", w.num, p.Name, p.Domain)
	w.dir = ""
	w.provider = p
	// Each job gets its own client honoring provider specific settings.
	w.client = w.cfg.httpClient(p)
}
// interimWork consumes interim jobs until the channel is closed.
// For each provider it scans every TLP label folder for interim
// advisories, re-downloads changed ones inside a lazy transaction,
// rewrites the interims.csv for finalized entries and commits the
// transaction at the end. The per-job error is stored in the job.
func (w *worker) interimWork(wg *sync.WaitGroup, jobs <-chan *interimJob) {
defer wg.Done()
path := filepath.Join(w.cfg.Web, ".well-known", "csaf-aggregator")
for j := range jobs {
w.setupProviderInterim(j.provider)
providerPath := filepath.Join(path, j.provider.Name)
j.err = func() error {
tx := newLazyTransaction(providerPath, w.cfg.Folder)
// rollback is a no-op after a successful commit.
defer tx.rollback()
// Try all the labels
for _, label := range []string{
csaf.TLPLabelUnlabeled,
csaf.TLPLabelWhite,
csaf.TLPLabelGreen,
csaf.TLPLabelAmber,
csaf.TLPLabelRed,
} {
label = strings.ToLower(label)
labelPath := filepath.Join(providerPath, label)
interimsCSV := filepath.Join(labelPath, "interims.csv")
interims, err := readInterims(
interimsCSV, w.cfg.InterimYears)
if err != nil {
return err
}
// no interims found -> next label.
if len(interims) == 0 {
continue
}
// Compare locals against remotes.
finalized, err := w.checkInterims(tx, label, interims)
if err != nil {
return err
}
if len(finalized) > 0 {
// We want to write in the transaction folder.
dst, err := tx.Dst()
if err != nil {
return err
}
interimsCSV := filepath.Join(dst, label, "interims.csv")
if err := writeInterims(interimsCSV, finalized); err != nil {
return err
}
}
}
return tx.commit()
}()
}
}
// joinErrors creates an aggregated error of the messages of the
// given errors, joined with ", ". It returns nil for an empty list.
func joinErrors(errs []error) error {
	if len(errs) == 0 {
		return nil
	}
	msgs := make([]string, len(errs))
	for i, err := range errs {
		msgs[i] = err.Error()
	}
	return errors.New(strings.Join(msgs, ", "))
}
// interim performs the short interim check/update over all configured
// providers with the configured number of workers. It only works in
// mirror (aggregator) mode.
func (p *processor) interim() error {
	if !p.cfg.runAsMirror() {
		// Fixed typo in message ("iterim" -> "interim").
		return errors.New("interim in lister mode does not work")
	}
	queue := make(chan *interimJob)
	var wg sync.WaitGroup
	log.Printf("Starting %d workers.\n", p.cfg.Workers)
	for i := 1; i <= p.cfg.Workers; i++ {
		wg.Add(1)
		w := newWorker(i, p.cfg)
		go w.interimWork(&wg, queue)
	}
	// Feed the jobs to the workers (renamed loop var to avoid
	// shadowing the receiver p).
	jobs := make([]interimJob, len(p.cfg.Providers))
	for i, prov := range p.cfg.Providers {
		jobs[i] = interimJob{provider: prov}
		queue <- &jobs[i]
	}
	close(queue)
	wg.Wait()
	// Collect the per-job errors.
	var errs []error
	for i := range jobs {
		if err := jobs[i].err; err != nil {
			errs = append(errs, err)
		}
	}
	return joinErrors(errs)
}
// writeInterims removes the entries listed in finalized from the
// given interims.csv (matched by the file name in column 2). If no
// entries remain the file is deleted, otherwise it is rewritten in
// place, which is safe because we operate inside a transaction.
func writeInterims(interimsCSV string, finalized []string) error {
	// In case this is a longer list (unlikely).
	removed := make(map[string]bool, len(finalized))
	for _, f := range finalized {
		removed[f] = true
	}
	// Read the surviving records.
	lines, err := func() ([][]string, error) {
		interimsF, err := os.Open(interimsCSV)
		if err != nil {
			return nil, err
		}
		defer interimsF.Close()
		c := csv.NewReader(interimsF)
		c.FieldsPerRecord = 3
		var lines [][]string
		for {
			record, err := c.Read()
			if err == io.EOF {
				break
			}
			if err != nil {
				return nil, err
			}
			// If not finalized it survives.
			if !removed[record[1]] {
				lines = append(lines, record)
			}
		}
		return lines, nil
	}()
	if err != nil {
		return err
	}
	// All interims are finalized now -> remove file.
	if len(lines) == 0 {
		return os.RemoveAll(interimsCSV)
	}
	// Overwrite old. It's safe because we are in a transaction.
	f, err := os.Create(interimsCSV)
	if err != nil {
		return err
	}
	c := csv.NewWriter(f)
	if err := c.WriteAll(lines); err != nil {
		// Fixed: the original returned f.Close() here, dropping
		// the actual write error.
		f.Close()
		return err
	}
	c.Flush()
	werr := c.Error()
	cerr := f.Close()
	if werr != nil {
		return werr
	}
	return cerr
}
// readInterims scans an interims.csv file for matching interim
// advisories and returns pairs of (file name, URL). The file is
// sorted youngest first, so scanning stops once entries are older
// than the cut-off. years <= 0 means no cut-off; a missing file
// simply means there are no interims.
func readInterims(interimsCSV string, years int) ([][2]string, error) {
	tooOld := func(time.Time) bool { return false }
	if years > 0 {
		from := time.Now().AddDate(-years, 0, 0)
		tooOld = func(t time.Time) bool { return t.Before(from) }
	}
	interimsF, err := os.Open(interimsCSV)
	if err != nil {
		if os.IsNotExist(err) {
			return nil, nil
		}
		return nil, err
	}
	defer interimsF.Close()
	r := csv.NewReader(interimsF)
	r.FieldsPerRecord = 3
	var files [][2]string
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		t, err := time.Parse(time.RFC3339, record[0])
		if err != nil {
			return nil, err
		}
		if tooOld(t) {
			break
		}
		files = append(files, [2]string{record[1], record[2]})
	}
	return files, nil
}

View file

@ -0,0 +1,86 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"os"
"path/filepath"
"github.com/csaf-poc/csaf_distribution/util"
)
// lazyTransaction is a copy-on-write directory transaction:
// Dst clones src into a fresh directory below dstDir only when a
// write is actually needed; commit/rollback finish the transaction.
type lazyTransaction struct {
// src is the directory the transaction operates on.
src string
// dstDir is where working copies are created.
dstDir string
// dst is the working copy; empty until Dst is first called.
dst string
}

// newLazyTransaction starts a lazy transaction over src with
// working directories created below dstDir.
func newLazyTransaction(src, dstDir string) *lazyTransaction {
return &lazyTransaction{
src: src,
dstDir: dstDir,
}
}
// Src returns the source directory of the transaction.
func (lt *lazyTransaction) Src() string {
return lt.src
}
// Dst returns the transaction's working directory, creating it and
// copying the current content of src into it on first use.
func (lt *lazyTransaction) Dst() (string, error) {
	if lt.dst != "" {
		return lt.dst, nil
	}
	srcBase := filepath.Base(lt.src)
	folder := filepath.Join(lt.dstDir, srcBase)
	dst, err := util.MakeUniqDir(folder)
	if err != nil {
		return "", err
	}
	// Copy old content into new.
	// Fixed: the original passed lt.dst (still empty at this point)
	// as the copy destination instead of the freshly created dst.
	if err := util.DeepCopy(dst, lt.src); err != nil {
		os.RemoveAll(dst)
		return "", err
	}
	lt.dst = dst
	return dst, nil
}
// rollback discards the working directory, if any.
func (lt *lazyTransaction) rollback() error {
	if lt.dst == "" {
		return nil
	}
	dst := lt.dst
	lt.dst = ""
	return os.RemoveAll(dst)
}
// commit finishes the transaction by switching the source directory
// to the written working copy via a symlink rename. A transaction
// without writes commits as a no-op.
func (lt *lazyTransaction) commit() error {
if lt.dst == "" {
// Nothing was written -> nothing to commit.
return nil
}
defer func() { lt.dst = "" }()
// Switch directories.
symlink := filepath.Join(lt.dstDir, filepath.Base(lt.src))
// NOTE(review): the symlink points at lt.dstDir, not the working
// copy lt.dst - lt.dst looks intended; confirm.
if err := os.Symlink(lt.dstDir, symlink); err != nil {
// NOTE(review): removing lt.dstDir wipes the whole working
// area, not just this transaction's directory - confirm.
os.RemoveAll(lt.dstDir)
return err
}
if err := os.Rename(symlink, lt.src); err != nil {
os.RemoveAll(lt.dstDir)
return err
}
// NOTE(review): this removes the path we just renamed onto - the
// intent (removing the old content) should be double-checked.
return os.RemoveAll(lt.src)
}

View file

@ -0,0 +1,34 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"fmt"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
)
// listAllowed checks if the provider allows being listed by
// aggregators ($.list_on_CSAF_aggregators in the provider metadata).
// (The original comment was copied from mirrorAllowed.)
func (w *worker) listAllowed() bool {
var b bool
return w.expr.Extract(
`$.list_on_CSAF_aggregators`,
util.BoolMatcher(&b), false, w.metadataProvider) == nil && b
}
// lister creates the aggregator entry for a provider which is only
// listed, not mirrored.
func (w *worker) lister() (*csaf.AggregatorCSAFProvider, error) {
// Check if we are allowed to list this provider.
if !w.listAllowed() {
return nil, fmt.Errorf(
"no listing of '%s' allowed", w.provider.Name)
}
return w.createAggregatorProvider()
}

View file

@ -0,0 +1,76 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"fmt"
"log"
"os"
"github.com/csaf-poc/csaf_distribution/util"
"github.com/gofrs/flock"
"github.com/jessevdk/go-flags"
)
// options are the command line options of the csaf_aggregator.
type options struct {
Config string `short:"c" long:"config" description:"File name of the configuration file" value-name:"CFG-FILE" default:"aggregator.toml"`
Version bool `long:"version" description:"Display version of the binary"`
Interim bool `short:"i" long:"interim" description:"Perform an interim scan"`
}
// errCheck terminates the program if err is not nil. A help request
// from the flags parser exits successfully.
func errCheck(err error) {
	if err == nil {
		return
	}
	if fe, ok := err.(*flags.Error); ok && fe.Type == flags.ErrHelp {
		os.Exit(0)
	}
	log.Fatalf("error: %v\n", err)
}
// lock runs fn while holding an exclusive file lock on *lockFile.
// A nil lockFile means locking is not configured and fn runs directly.
// Returns an error if the lock cannot be acquired.
func lock(lockFile *string, fn func() error) error {
if lockFile == nil {
// No locking configured.
return fn()
}
fl := flock.New(*lockFile)
locked, err := fl.TryLock()
if err != nil {
return fmt.Errorf("file locking failed: %v", err)
}
if !locked {
return fmt.Errorf("cannot lock to file %s", *lockFile)
}
// Unlock errors are intentionally ignored; fn's result wins.
defer fl.Unlock()
return fn()
}
// main parses the command line, loads the configuration and runs the
// processor under the configured file lock.
func main() {
	var opts options
	_, err := flags.Parse(&opts)
	errCheck(err)
	if opts.Version {
		fmt.Println(util.SemVersion)
		return
	}
	cfg, err := loadConfig(opts.Config)
	errCheck(err)
	// The command line can force an interim run.
	if opts.Interim {
		cfg.Interim = true
	}
	p := processor{cfg: cfg}
	errCheck(lock(cfg.LockFile, p.process))
}

View file

@ -0,0 +1,516 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"bytes"
"crypto/sha256"
"crypto/sha512"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"time"
"github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
)
// handleROLIE walks over all ROLIE feeds advertised in the provider
// metadata, downloads each feed and calls process with the feed's TLP
// label and the resolved document URLs. Problems with a single feed
// are logged and the feed is skipped; only errors from process abort.
func (w *worker) handleROLIE(
rolie interface{},
process func(*csaf.TLPLabel, []string) error,
) error {
// Feed URLs may be relative to the provider metadata location.
base, err := url.Parse(w.loc)
if err != nil {
return err
}
var feeds [][]csaf.Feed
if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
return err
}
log.Printf("Found %d ROLIE feed(s).\n", len(feeds))
for _, fs := range feeds {
for i := range fs {
feed := &fs[i]
if feed.URL == nil {
continue
}
up, err := url.Parse(string(*feed.URL))
if err != nil {
log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err)
continue
}
feedURL := base.ResolveReference(up).String()
log.Printf("Feed URL: %s\n", feedURL)
fb, err := util.BaseURL(feedURL)
if err != nil {
log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err)
continue
}
feedBaseURL, err := url.Parse(fb)
if err != nil {
log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err)
continue
}
res, err := w.client.Get(feedURL)
if err != nil {
log.Printf("error: Cannot get feed '%s'\n", err)
continue
}
if res.StatusCode != http.StatusOK {
// NOTE(review): res.Body is not closed on this path.
log.Printf("error: Fetching %s failed. Status code %d (%s)",
feedURL, res.StatusCode, res.Status)
continue
}
rfeed, err := func() (*csaf.ROLIEFeed, error) {
defer res.Body.Close()
return csaf.LoadROLIEFeed(res.Body)
}()
if err != nil {
log.Printf("Loading ROLIE feed failed: %v.", err)
continue
}
// Make the entries' URLs absolute relative to the feed.
files := resolveURLs(rfeed.Files(), feedBaseURL)
if err := process(feed.TLPLabel, files); err != nil {
return err
}
}
}
return nil
}
// mirrorAllowed checks if mirroring is allowed
// ($.mirror_on_CSAF_aggregators in the provider metadata).
func (w *worker) mirrorAllowed() bool {
var b bool
return w.expr.Extract(
`$.mirror_on_CSAF_aggregators`,
util.BoolMatcher(&b), false, w.metadataProvider) == nil && b
}
// mirror mirrors the current provider and returns its entry for the
// aggregator document. If mirroring fails, any partially written
// download directory is removed.
func (w *worker) mirror() (*csaf.AggregatorCSAFProvider, error) {
	result, err := w.mirrorInternal()
	if err == nil || w.dir == "" {
		return result, err
	}
	// Something went wrong: clean up the debris.
	if rmErr := os.RemoveAll(w.dir); rmErr != nil {
		log.Printf("error: %v\n", rmErr)
	}
	return result, err
}
// mirrorInternal does the real work of mirroring the current provider:
// it checks the mirroring permission, downloads the advisories (via
// ROLIE feeds or, as a fallback, index.txt), writes the indices and the
// local provider metadata and returns the aggregator entry of the new
// mirror.
func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {

	// Check if we are allowed to mirror this domain.
	if !w.mirrorAllowed() {
		return nil, fmt.Errorf(
			"no mirroring of '%s' allowed", w.provider.Name)
	}

	// Collecting the summaries of the advisories.
	w.summaries = make(map[string][]summary)

	// Check if we have ROLIE feeds.
	rolie, err := w.expr.Eval(
		"$.distributions[*].rolie.feeds", w.metadataProvider)
	if err != nil {
		log.Printf("rolie check failed: %v\n", err)
		return nil, err
	}

	fs, hasRolie := rolie.([]interface{})
	hasRolie = hasRolie && len(fs) > 0

	if hasRolie {
		if err := w.handleROLIE(rolie, w.mirrorFiles); err != nil {
			return nil, err
		}
	} else {
		// No ROLIE feeds -> try to load files from index.txt
		baseURL, err := util.BaseURL(w.loc)
		if err != nil {
			return nil, err
		}
		files, err := w.loadIndex(baseURL)
		if err != nil {
			return nil, err
		}
		// XXX: Is treating as white okay? better look into the advisories?
		white := csaf.TLPLabel(csaf.TLPLabelWhite)
		if err := w.mirrorFiles(&white, files); err != nil {
			return nil, err
		}
	} // TODO: else scan directories?

	if err := w.writeIndices(); err != nil {
		return nil, err
	}

	if err := w.doMirrorTransaction(); err != nil {
		return nil, err
	}

	if err := w.writeProviderMetadata(); err != nil {
		return nil, err
	}

	acp, err := w.createAggregatorProvider()
	if err != nil {
		return nil, err
	}

	// Add us as a mirror.
	mirrorURL := csaf.ProviderURL(
		fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/provider-metadata.json",
			w.cfg.Domain, w.provider.Name))

	acp.Mirrors = []csaf.ProviderURL{
		mirrorURL,
	}

	return acp, err
}
// labelsFromSummaries returns the TLP labels for which advisory
// summaries were collected, sorted in ascending order.
func (w *worker) labelsFromSummaries() []csaf.TLPLabel {
	all := make([]csaf.TLPLabel, 0, len(w.summaries))
	for key := range w.summaries {
		all = append(all, csaf.TLPLabel(key))
	}
	sort.Slice(all, func(a, b int) bool { return all[a] < all[b] })
	return all
}
// writeProviderMetadata writes a local provider metadata for a mirror.
// Publisher, last update time and the OpenPGP keys are taken over from
// the original provider's metadata; the aggregator's own signing key
// (if configured) is added on top. Extraction problems are only logged,
// as a partially filled metadata document is still usable.
func (w *worker) writeProviderMetadata() error {

	fname := filepath.Join(w.dir, "provider-metadata.json")

	pm := csaf.NewProviderMetadataDomain(
		w.cfg.Domain,
		w.labelsFromSummaries())

	// Figure out the role: a provider configured with a full
	// https:// URL is recorded as a publisher, a bare domain as a
	// provider.
	var role csaf.MetadataRole

	if strings.HasPrefix(w.provider.Domain, "https://") {
		role = csaf.MetadataRolePublisher
	} else {
		role = csaf.MetadataRoleProvider
	}

	pm.Role = &role

	pm.Publisher = new(csaf.Publisher)

	var lastUpdate time.Time

	if err := w.expr.Match([]util.PathEvalMatcher{
		{Expr: `$.publisher`, Action: util.ReMarshalMatcher(pm.Publisher)},
		{Expr: `$.last_updated`, Action: util.TimeMatcher(&lastUpdate, time.RFC3339)},
		{Expr: `$.public_openpgp_keys`, Action: util.ReMarshalMatcher(&pm.PGPKeys)},
	}, w.metadataProvider); err != nil {
		// only log the errors
		log.Printf("extracting data from original provider failed: %v\n", err)
	}

	key, err := w.cfg.cryptoKey()
	if err != nil {
		log.Printf("error: %v\n", err)
	}
	if key != nil {
		pm.SetPGP(key.GetFingerprint(), w.cfg.GetOpenPGPURL(key))
	}

	la := csaf.TimeStamp(lastUpdate)
	pm.LastUpdated = &la

	return util.WriteToFile(fname, pm)
}
// createAggregatorProvider renders the "metadata" section of this
// provider's entry in the "csaf_providers" list of the aggregator
// document, extracted from the original provider metadata.
func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error) {
	// JSON paths into the original provider-metadata.json.
	const (
		lastUpdatedExpr = `$.last_updated`
		publisherExpr   = `$.publisher`
		roleExpr        = `$.role`
		urlExpr         = `$.canonical_url`
	)

	var (
		lastUpdatedT time.Time
		pub          csaf.Publisher
		roleS        string
		urlS         string
	)

	if err := w.expr.Match([]util.PathEvalMatcher{
		{Expr: lastUpdatedExpr, Action: util.TimeMatcher(&lastUpdatedT, time.RFC3339)},
		{Expr: publisherExpr, Action: util.ReMarshalMatcher(&pub)},
		{Expr: roleExpr, Action: util.StringMatcher(&roleS)},
		{Expr: urlExpr, Action: util.StringMatcher(&urlS)},
	}, w.metadataProvider); err != nil {
		return nil, err
	}

	// Convert the extracted values into their csaf types.
	var (
		lastUpdated = csaf.TimeStamp(lastUpdatedT)
		role        = csaf.MetadataRole(roleS)
		url         = csaf.ProviderURL(urlS)
	)

	return &csaf.AggregatorCSAFProvider{
		Metadata: &csaf.AggregatorCSAFProviderMetadata{
			LastUpdated: &lastUpdated,
			Publisher:   &pub,
			Role:        &role,
			URL:         &url,
		},
	}, nil
}
// doMirrorTransaction performs an atomic directory swap: a symlink to
// the freshly downloaded directory w.dir is created in the data folder
// and then renamed over the web-facing target; the previously linked
// directory is removed last. On any failure the new directory w.dir is
// removed.
func (w *worker) doMirrorTransaction() error {

	webTarget := filepath.Join(
		w.cfg.Web, ".well-known", "csaf-aggregator", w.provider.Name)

	var oldWeb string

	// Resolve old to be removed later
	if _, err := os.Stat(webTarget); err != nil {
		if !os.IsNotExist(err) {
			os.RemoveAll(w.dir)
			return err
		}
	} else {
		if oldWeb, err = filepath.EvalSymlinks(webTarget); err != nil {
			os.RemoveAll(w.dir)
			return err
		}
	}

	// Check if there is a symlink already.
	target := filepath.Join(w.cfg.Folder, w.provider.Name)
	log.Printf("target: '%s'\n", target)

	exists, err := util.PathExists(target)
	if err != nil {
		os.RemoveAll(w.dir)
		return err
	}

	// A stale link from a previous run is replaced.
	if exists {
		if err := os.RemoveAll(target); err != nil {
			os.RemoveAll(w.dir)
			return err
		}
	}

	log.Printf("sym link: %s -> %s\n", w.dir, target)

	// Create a new symlink
	if err := os.Symlink(w.dir, target); err != nil {
		os.RemoveAll(w.dir)
		return err
	}

	// Move the symlink
	log.Printf("Move: %s -> %s\n", target, webTarget)
	if err := os.Rename(target, webTarget); err != nil {
		os.RemoveAll(w.dir)
		return err
	}

	// Finally remove the old folder.
	if oldWeb != "" {
		return os.RemoveAll(oldWeb)
	}
	return nil
}
// downloadSignature downloads an OpenPGP signature from a given url.
// It returns errNotFound if the server does not answer with status 200
// and an error if the payload is not a valid ASCII armored message.
func (w *worker) downloadSignature(path string) (string, error) {
	res, err := w.client.Get(path)
	if err != nil {
		return "", err
	}
	if res.StatusCode != http.StatusOK {
		// Close the body on the error path, too, so the
		// underlying connection can be reused.
		res.Body.Close()
		return "", errNotFound
	}
	data, err := func() ([]byte, error) {
		defer res.Body.Close()
		return io.ReadAll(res.Body)
	}()
	if err != nil {
		return "", err
	}
	result := string(data)
	// Reject payloads which are not armored OpenPGP messages.
	if _, err := crypto.NewPGPMessageFromArmored(result); err != nil {
		return "", err
	}
	return result, nil
}
// sign creates a detached ASCII armored signature over data with the
// configured key. The key ring is built lazily on first use. An empty
// string (and no error) is returned when no key is configured.
func (w *worker) sign(data []byte) (string, error) {
	if w.signRing == nil {
		key, err := w.cfg.cryptoKey()
		if err != nil {
			return "", err
		}
		if key == nil {
			// No key configured -> nothing to sign with.
			return "", nil
		}
		if pp := w.cfg.Passphrase; pp != nil {
			key, err = key.Unlock([]byte(*pp))
			if err != nil {
				return "", err
			}
		}
		ring, err := crypto.NewKeyRing(key)
		if err != nil {
			return "", err
		}
		w.signRing = ring
	}
	sig, err := w.signRing.SignDetached(crypto.NewPlainMessage(data))
	if err != nil {
		return "", err
	}
	return sig.GetArmored()
}
// mirrorFiles downloads the given advisory files into the mirror's
// label/year directory layout, validates each document against the
// CSAF schema, records its summary and writes hash and signature files
// alongside it. Problems with individual files are logged and the file
// is skipped; only local write errors abort the run.
func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
	label := "unknown"
	if tlpLabel != nil {
		label = strings.ToLower(string(*tlpLabel))
	}

	summaries := w.summaries[label]

	dir, err := w.createDir()
	if err != nil {
		return err
	}

	var content bytes.Buffer

	// Cache of already created year directories.
	yearDirs := make(map[int]string)

	for _, file := range files {
		u, err := url.Parse(file)
		if err != nil {
			log.Printf("error: %s\n", err)
			continue
		}

		filename := util.CleanFileName(filepath.Base(u.Path))

		var advisory interface{}

		// Hash the document while downloading it.
		s256 := sha256.New()
		s512 := sha512.New()
		content.Reset()
		hasher := io.MultiWriter(s256, s512, &content)

		download := func(r io.Reader) error {
			tee := io.TeeReader(r, hasher)
			return json.NewDecoder(tee).Decode(&advisory)
		}

		if err := downloadJSON(w.client, file, download); err != nil {
			log.Printf("error: %v\n", err)
			continue
		}

		// Check the schema validity of the advisory.
		// (renamed from 'errors' to avoid shadowing the package name)
		validationErrs, err := csaf.ValidateCSAF(advisory)
		if err != nil {
			log.Printf("error: %s: %v", file, err)
			continue
		}
		if len(validationErrs) > 0 {
			log.Printf("CSAF file %s has %d validation errors.",
				file, len(validationErrs))
			continue
		}

		sum, err := csaf.NewAdvisorySummary(w.expr, advisory)
		if err != nil {
			log.Printf("error: %s: %v\n", file, err)
			continue
		}
		summaries = append(summaries, summary{
			filename: filename,
			summary:  sum,
			url:      file,
		})

		// Advisories are grouped by the year of their initial release.
		year := sum.InitialReleaseDate.Year()

		yearDir := yearDirs[year]
		if yearDir == "" {
			yearDir = filepath.Join(dir, label, strconv.Itoa(year))
			if err := os.MkdirAll(yearDir, 0755); err != nil {
				return err
			}
			yearDirs[year] = yearDir
		}

		fname := filepath.Join(yearDir, filename)
		data := content.Bytes()
		if err := writeFileHashes(
			fname, filename,
			data, s256.Sum(nil), s512.Sum(nil),
		); err != nil {
			return err
		}

		// Try to fetch signature file.
		sigURL := file + ".asc"
		ascFile := fname + ".asc"
		if err := w.downloadSignatureOrSign(sigURL, ascFile, data); err != nil {
			return err
		}
	}
	w.summaries[label] = summaries

	return nil
}
// downloadSignatureOrSign first tries to download a signature.
// If this fails it creates a signature itself with the configured key.
// An empty signature (no key configured) results in no file being
// written.
func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error {
	sig, err := w.downloadSignature(url)
	if err != nil {
		// errNotFound is expected; only log other errors.
		if err != errNotFound {
			log.Printf("error: %s: %v\n", url, err)
		}
		// Sign it ourselves.
		if sig, err = w.sign(data); err != nil {
			return err
		}
	}
	if sig != "" {
		err = os.WriteFile(fname, []byte(sig), 0644)
	}
	return err
}

View file

@ -0,0 +1,255 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"encoding/json"
"errors"
"io"
"log"
"net/http"
"os"
"path/filepath"
"strings"
"github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/csaf-poc/csaf_distribution/csaf"
"github.com/csaf-poc/csaf_distribution/util"
)
// processor is the top level driver running the aggregation for all
// configured providers.
type processor struct {
	cfg *config
}
// summary couples the essential data of a mirrored advisory with the
// local filename it is stored under and its original URL.
type summary struct {
	filename string
	summary  *csaf.AdvisorySummary
	url      string
}
// worker holds the state needed to mirror a single provider.
type worker struct {
	num      int
	expr     *util.PathEval
	cfg      *config
	signRing *crypto.KeyRing

	client           client               // client per provider
	provider         *provider            // current provider
	metadataProvider interface{}          // current metadata provider
	loc              string               // URL of current provider-metadata.json
	dir              string               // Directory to store data to.
	summaries        map[string][]summary // the summaries of the advisories.
}
// newWorker returns a worker with the given number operating on the
// given configuration.
func newWorker(num int, config *config) *worker {
	w := worker{
		num:  num,
		cfg:  config,
		expr: util.NewPathEval(),
	}
	return &w
}
func ensureDir(path string) error {
_, err := os.Stat(path)
if err != nil && os.IsNotExist(err) {
return os.MkdirAll(path, 0750)
}
return err
}
// createDir creates the unique download directory for the current
// provider on first call and caches it in w.dir for subsequent calls.
func (w *worker) createDir() (string, error) {
	if w.dir == "" {
		dir, err := util.MakeUniqDir(
			filepath.Join(w.cfg.Folder, w.provider.Name))
		if err != nil {
			return dir, err
		}
		w.dir = dir
	}
	return w.dir, nil
}
// httpsDomain prefixes a domain with 'https://' unless it already
// carries that prefix.
func httpsDomain(domain string) string {
	const scheme = "https://"
	if strings.HasPrefix(domain, scheme) {
		return domain
	}
	return scheme + domain
}
// providerMetadataLocations are the candidate paths, relative to the
// provider's domain, probed for the provider metadata.
// NOTE(review): the entries do not end in "provider-metadata.json";
// verify that downloadJSON / the server resolves these directory paths
// to the actual metadata document.
var providerMetadataLocations = [...]string{
	".well-known/csaf",
	"security/data/csaf",
	"advisories/csaf",
	"security/csaf",
}
// locateProviderMetadata searches the given domain for a provider
// metadata document. The well-known locations are probed first; if none
// answers, the CSAF entry of the domain's security.txt is used.
// On success w.metadataProvider holds the decoded document and w.loc
// its absolute URL.
func (w *worker) locateProviderMetadata(domain string) error {

	w.metadataProvider = nil

	download := func(r io.Reader) error {
		if err := json.NewDecoder(r).Decode(&w.metadataProvider); err != nil {
			log.Printf("error: %s\n", err)
			return errNotFound
		}
		return nil
	}

	hd := httpsDomain(domain)

	for _, loc := range providerMetadataLocations {
		url := hd + "/" + loc
		if err := downloadJSON(w.client, url, download); err != nil {
			if err == errNotFound {
				continue
			}
			return err
		}
		if w.metadataProvider != nil {
			// Store the absolute URL (not the relative location):
			// w.loc is used as the base for resolving relative
			// references later on.
			w.loc = url
			return nil
		}
	}

	// Read from security.txt
	path := hd + "/.well-known/security.txt"
	res, err := w.client.Get(path)
	if err != nil {
		return err
	}
	if res.StatusCode != http.StatusOK {
		return errNotFound
	}

	if err := func() error {
		defer res.Body.Close()
		urls, err := csaf.ExtractProviderURL(res.Body, false)
		if err != nil {
			return err
		}
		if len(urls) == 0 {
			return errors.New("no provider-metadata.json found in security.txt")
		}
		w.loc = urls[0]
		return nil
	}(); err != nil {
		return err
	}

	return downloadJSON(w.client, w.loc, download)
}
// removeOrphans removes the directories that are not in the providers list.
// It scans the web-facing csaf-aggregator directory, removes symlinks
// whose provider is no longer configured, and deletes the linked data
// directory if it lies directly inside the configured data folder.
func (p *processor) removeOrphans() error {
	// Names of the providers we still serve.
	keep := make(map[string]bool)
	for _, p := range p.cfg.Providers {
		keep[p.Name] = true
	}

	path := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator")

	entries, err := func() ([]os.DirEntry, error) {
		dir, err := os.Open(path)
		if err != nil {
			return nil, err
		}
		defer dir.Close()
		return dir.ReadDir(-1)
	}()
	if err != nil {
		return err
	}

	// Resolve the data folder so link targets can be compared against it.
	prefix, err := filepath.Abs(p.cfg.Folder)
	if err != nil {
		return err
	}
	prefix, err = filepath.EvalSymlinks(prefix)
	if err != nil {
		return err
	}

	for _, entry := range entries {
		if keep[entry.Name()] {
			continue
		}
		fi, err := entry.Info()
		if err != nil {
			log.Printf("error: %v\n", err)
			continue
		}

		// only remove the symlinks
		if fi.Mode()&os.ModeSymlink != os.ModeSymlink {
			continue
		}

		d := filepath.Join(path, entry.Name())
		r, err := filepath.EvalSymlinks(d)
		if err != nil {
			log.Printf("error: %v\n", err)
			continue
		}

		fd, err := os.Stat(r)
		if err != nil {
			log.Printf("error: %v\n", err)
			continue
		}

		// If it's not a directory it's not a mirror.
		if !fd.IsDir() {
			continue
		}

		// Remove the link.
		log.Printf("removing link %s -> %s\n", d, r)
		if err := os.Remove(d); err != nil {
			log.Printf("error: %v\n", err)
			continue
		}

		// Only remove directories which are in our folder
		// (i.e. direct children of the resolved prefix).
		if rel, err := filepath.Rel(prefix, r); err == nil &&
			rel == filepath.Base(r) {
			log.Printf("removing directory %s\n", r)
			if err := os.RemoveAll(r); err != nil {
				log.Printf("error: %v\n", err)
			}
		}
	}

	return nil
}
// process is the main driver of the jobs handled by work.
// It prepares the data and web directories, drops orphaned mirrors and
// then runs either an interim or a full aggregation.
func (p *processor) process() error {
	web := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator")
	for _, dir := range []string{p.cfg.Folder, web} {
		if err := ensureDir(dir); err != nil {
			return err
		}
	}

	if err := p.removeOrphans(); err != nil {
		return err
	}

	if p.cfg.Interim {
		return p.interim()
	}
	return p.full()
}

View file

@ -0,0 +1,28 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
import (
"log"
"net/url"
)
// resolveURLs resolves a list of URLs urls against a base URL base.
func resolveURLs(urls []string, base *url.URL) []string {
out := make([]string, 0, len(urls))
for _, u := range urls {
p, err := url.Parse(u)
if err != nil {
log.Printf("error: Invalid URL '%s': %v\n", u, err)
continue
}
out = append(out, base.ResolveReference(p).String())
}
return out
}

View file

@ -376,7 +376,7 @@ func (p *processor) integrity(
}
h, err := func() ([]byte, error) {
defer res.Body.Close()
return hashFromReader(res.Body)
return util.HashFromReader(res.Body)
}()
if err != nil {
p.badIntegrities.add("Reading %s failed: %v.", hashFile, err)
@ -461,7 +461,7 @@ func (p *processor) processROLIEFeed(feed string) error {
p.badProviderMetadata.add("Loading ROLIE feed failed: %v.", err)
return errContinue
}
base, err := basePath(feed)
base, err := util.BaseURL(feed)
if err != nil {
p.badProviderMetadata.add("Bad base path: %v", err)
return errContinue
@ -639,7 +639,7 @@ func (p *processor) checkCSAFs(domain string) error {
}
// No rolie feeds
base, err := basePath(p.pmdURL)
base, err := util.BaseURL(p.pmdURL)
if err != nil {
return err
}
@ -745,12 +745,12 @@ func (p *processor) locateProviderMetadata(
}
if res.StatusCode != http.StatusOK {
return err
return nil
}
loc, err := func() (string, error) {
defer res.Body.Close()
return extractProviderURL(res.Body)
return p.extractProviderURL(res.Body)
}()
if err != nil {
@ -767,24 +767,24 @@ func (p *processor) locateProviderMetadata(
return err
}
func extractProviderURL(r io.Reader) (string, error) {
sc := bufio.NewScanner(r)
const csaf = "CSAF:"
for sc.Scan() {
line := sc.Text()
if strings.HasPrefix(line, csaf) {
line = strings.TrimSpace(line[len(csaf):])
if !strings.HasPrefix(line, "https://") {
return "", errors.New("CSAF: found in security.txt, but does not start with https://")
}
return line, nil
}
}
if err := sc.Err(); err != nil {
func (p *processor) extractProviderURL(r io.Reader) (string, error) {
urls, err := csaf.ExtractProviderURL(r, true)
if err != nil {
return "", err
}
return "", nil
if len(urls) == 0 {
return "", errors.New("No provider-metadata.json found")
}
if len(urls) > 1 {
p.badSecurity.use()
p.badSecurity.add("Found %d CSAF entries in security.txt", len(urls))
}
if !strings.HasPrefix(urls[0], "https://") {
p.badSecurity.use()
p.badSecurity.add("CSAF URL does not start with https://: %s", urls[0])
}
return urls[0], nil
}
// checkProviderMetadata checks provider-metadata.json. If it exists,

View file

@ -17,7 +17,6 @@ import (
"net/http"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
@ -29,18 +28,8 @@ import (
const dateFormat = time.RFC3339
// cleanFileName removes the "/" "\" charachters and replace the two or more
// occurences of "." with only one from the passed string.
func cleanFileName(s string) string {
s = strings.ReplaceAll(s, `/`, ``)
s = strings.ReplaceAll(s, `\`, ``)
r := regexp.MustCompile(`\.{2,}`)
s = r.ReplaceAllString(s, `.`)
return s
}
// loadCSAF loads the csaf file from the request, calls the "UploadLimter" function to
// set the upload limit size of the file and the "cleanFileName" to refine
// set the upload limit size of the file and the refines
// the filename. It returns the filename, file content in a buffer of bytes
// and an error.
func (c *controller) loadCSAF(r *http.Request) (string, []byte, error) {
@ -54,7 +43,7 @@ func (c *controller) loadCSAF(r *http.Request) (string, []byte, error) {
if _, err := io.Copy(&buf, c.cfg.uploadLimiter(file)); err != nil {
return "", nil, err
}
return cleanFileName(handler.Filename), buf.Bytes(), nil
return util.CleanFileName(handler.Filename), buf.Bytes(), nil
}
func (c *controller) handleSignature(

View file

@ -11,41 +11,26 @@ package main
import (
"crypto/sha256"
"crypto/sha512"
"fmt"
"hash"
"io/ioutil"
"os"
"github.com/csaf-poc/csaf_distribution/util"
)
func writeHash(fname, name string, h hash.Hash, data []byte) error {
if _, err := h.Write(data); err != nil {
return err
}
f, err := os.Create(fname)
if err != nil {
return err
}
fmt.Fprintf(f, "%x %s\n", h.Sum(nil), name)
return f.Close()
}
func writeHashedFile(fname, name string, data []byte, armored string) error {
// Write the file itself.
if err := ioutil.WriteFile(fname, data, 0644); err != nil {
if err := os.WriteFile(fname, data, 0644); err != nil {
return err
}
// Write SHA256 sum.
if err := writeHash(fname+".sha256", name, sha256.New(), data); err != nil {
if err := util.WriteHashToFile(fname+".sha256", name, sha256.New(), data); err != nil {
return err
}
// Write SHA512 sum.
if err := writeHash(fname+".sha512", name, sha512.New(), data); err != nil {
if err := util.WriteHashToFile(fname+".sha512", name, sha512.New(), data); err != nil {
return err
}
// Write signature.
if err := ioutil.WriteFile(fname+".asc", []byte(armored), 0644); err != nil {
if err := os.WriteFile(fname+".asc", []byte(armored), 0644); err != nil {
return err
}
return nil

View file

@ -37,11 +37,12 @@ const (
)
var tlpLabelPattern = alternativesUnmarshal(
string("UNLABELED"),
string("WHITE"),
string("GREEN"),
string("AMBER"),
string("RED"))
string(TLPLabelUnlabeled),
string(TLPLabelWhite),
string(TLPLabelGreen),
string(TLPLabelAmber),
string(TLPLabelRed),
)
// JSONURL is an URL to JSON document.
type JSONURL string
@ -162,6 +163,188 @@ type ProviderMetadata struct {
Role *MetadataRole `json:"role"` // required
}
// AggregatorCategory is the category of the aggregator.
type AggregatorCategory string
const (
// AggregatorAggregator represents the "aggregator" type of aggregators.
AggregatorAggregator AggregatorCategory = "aggregator"
// AggregatorLister represents the "listers" type of aggregators.
AggregatorLister AggregatorCategory = "lister"
)
var aggregatorCategoryPattern = alternativesUnmarshal(
string(AggregatorAggregator),
string(AggregatorLister),
)
// AggregatorVersion is the version of the aggregator.
type AggregatorVersion string
const (
// AggregatorVersion20 is version 2.0 of the aggregator.
AggregatorVersion20 AggregatorVersion = "2.0"
)
var aggregatorVersionPattern = alternativesUnmarshal(
string(AggregatorVersion20),
)
// AggregatorInfo reflects the 'aggregator' object in the aggregator.
type AggregatorInfo struct {
Category *AggregatorCategory `json:"category,omitempty" toml:"category"` // required
Name string `json:"name" toml:"name"` // required
ContactDetails string `json:"contact_details,omitempty" toml:"contact_details"`
IssuingAuthority string `json:"issuing_authority,omitempty" toml:"issuing_authority"`
Namespace string `json:"namespace" toml:"namespace"` // required
}
// AggregatorURL is the URL of the aggregator document.
type AggregatorURL string
var aggregatorURLPattern = patternUnmarshal(`/aggregator\.json$`)
// AggregatorCSAFProviderMetadata reflects 'csaf_providers.metadata' in an aggregator.
type AggregatorCSAFProviderMetadata struct {
LastUpdated *TimeStamp `json:"last_updated,omitempty"` // required
Publisher *Publisher `json:"publisher,omitempty"` // required
Role *MetadataRole `json:"role,omitempty"`
URL *ProviderURL `json:"url,omitempty"` // required
}
// AggregatorCSAFProvider reflects one 'csaf_trusted_provider' in an aggregator.
type AggregatorCSAFProvider struct {
Metadata *AggregatorCSAFProviderMetadata `json:"metadata,omitempty"` // required
Mirrors []ProviderURL `json:"mirrors,omitempty"` // required
}
// Aggregator is the CSAF Aggregator.
type Aggregator struct {
Aggregator *AggregatorInfo `json:"aggregator,omitempty"` // required
Version *AggregatorVersion `json:"aggregator_version,omitempty"` // required
CanonicalURL *AggregatorURL `json:"canonical_url,omitempty"` // required
CSAFProviders []*AggregatorCSAFProvider `json:"csaf_providers,omitempty"` // required
LastUpdated *TimeStamp `json:"last_updated,omitempty"` // required
}
// Validate validates the current state of the AggregatorCategory.
// A nil receiver means the mandatory category field is missing.
func (ac *AggregatorCategory) Validate() error {
	if ac == nil {
		return errors.New("aggregator.aggregator.category is mandatory")
	}
	return nil
}
// Validate validates the current state of the AggregatorVersion.
// A nil receiver means the mandatory aggregator_version field is missing.
func (av *AggregatorVersion) Validate() error {
	if av == nil {
		return errors.New("aggregator.aggregator_version is mandatory")
	}
	return nil
}
// Validate validates the current state of the AggregatorURL.
// A nil receiver means the mandatory canonical_url field is missing.
func (au *AggregatorURL) Validate() error {
	if au == nil {
		// The JSON field backing this type is 'canonical_url'.
		return errors.New("aggregator.canonical_url is mandatory")
	}
	return nil
}
// Validate validates the current state of the AggregatorInfo.
// Category, name and namespace are mandatory.
func (ai *AggregatorInfo) Validate() error {
	if err := ai.Category.Validate(); err != nil {
		return err
	}
	if ai.Name == "" {
		return errors.New("aggregator.aggregator.name is mandatory")
	}
	if ai.Namespace == "" {
		return errors.New("aggregator.aggregator.namespace is mandatory")
	}
	return nil
}
// Validate validates the current state of the AggregatorCSAFProviderMetadata.
// last_updated, publisher and url are mandatory; the publisher itself
// is validated recursively.
func (acpm *AggregatorCSAFProviderMetadata) Validate() error {
	if acpm == nil {
		return errors.New("aggregator.csaf_providers[].metadata is mandatory")
	}
	if acpm.LastUpdated == nil {
		return errors.New("aggregator.csaf_providers[].metadata.last_updated is mandatory")
	}
	if acpm.Publisher == nil {
		return errors.New("aggregator.csaf_providers[].metadata.publisher is mandatory")
	}
	if err := acpm.Publisher.Validate(); err != nil {
		return err
	}
	if acpm.URL == nil {
		return errors.New("aggregator.csaf_providers[].metadata.url is mandatory")
	}
	return nil
}
// Validate validates the current state of the AggregatorCSAFProvider.
// The entry itself and its metadata are mandatory.
func (acp *AggregatorCSAFProvider) Validate() error {
	if acp == nil {
		return errors.New("aggregator.csaf_providers[] not allowed to be nil")
	}
	if err := acp.Metadata.Validate(); err != nil {
		return err
	}
	return nil
}
// Validate validates the current state of the Aggregator.
// All mandatory top-level fields (aggregator, aggregator_version,
// canonical_url, csaf_providers, last_updated) are checked.
func (a *Aggregator) Validate() error {
	if err := a.Aggregator.Validate(); err != nil {
		return err
	}
	if err := a.Version.Validate(); err != nil {
		return err
	}
	if err := a.CanonicalURL.Validate(); err != nil {
		return err
	}
	for _, provider := range a.CSAFProviders {
		if err := provider.Validate(); err != nil {
			return err
		}
	}
	if a.LastUpdated == nil {
		// Message phrased consistently with the other validators.
		return errors.New("aggregator.last_updated is mandatory")
	}
	return nil
}
// UnmarshalText implements the encoding.TextUnmarshaller interface.
func (ac *AggregatorCategory) UnmarshalText(data []byte) error {
	s, err := aggregatorCategoryPattern(data)
	if err != nil {
		return err
	}
	*ac = AggregatorCategory(s)
	return nil
}
// UnmarshalText implements the encoding.TextUnmarshaller interface.
func (av *AggregatorVersion) UnmarshalText(data []byte) error {
	s, err := aggregatorVersionPattern(data)
	if err != nil {
		return err
	}
	*av = AggregatorVersion(s)
	return nil
}
// UnmarshalText implements the encoding.TextUnmarshaller interface.
func (au *AggregatorURL) UnmarshalText(data []byte) error {
	s, err := aggregatorURLPattern(data)
	if err != nil {
		return err
	}
	*au = AggregatorURL(s)
	return nil
}
func patternUnmarshal(pattern string) func([]byte) (string, error) {
r := regexp.MustCompile(pattern)
return func(data []byte) (string, error) {
@ -485,3 +668,12 @@ func LoadProviderMetadata(r io.Reader) (*ProviderMetadata, error) {
return &pmd, nil
}
// WriteTo saves an Aggregator to a writer as indented JSON and returns
// the number of bytes written.
func (a *Aggregator) WriteTo(w io.Writer) (int64, error) {
	// NWriter counts the bytes passed through to w.
	nw := util.NWriter{Writer: w, N: 0}
	enc := json.NewEncoder(&nw)
	enc.SetIndent("", " ")
	err := enc.Encode(a)
	return nw.N, err
}

View file

@ -0,0 +1,215 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://docs.oasis-open.org/csaf/csaf/v2.0/aggregator_json_schema.json",
"title": "CSAF aggregator",
"description": "Representation of information where to find CSAF providers as a JSON document.",
"type": "object",
"$defs": {
"aggregator_url_t": {
"title": "Aggregator URL type",
"description": "Contains a URL.",
"type": "string",
"format": "uri",
"pattern": "/aggregator\\.json$"
},
"metadata_t": {
"title": "CSAF issuing party metadata.",
"description": "Contains the metadata of a single CSAF issuing party.",
"type": "object",
"required": [
"last_updated",
"publisher",
"url"
],
"properties": {
"last_updated": {
"title": "Last updated",
"description": "Holds the date and time when this entry was last updated.",
"type": "string",
"format": "date-time"
},
"publisher": {
"title": "Publisher",
"description": "Provides information about the issuing party for this entry.",
"$ref": "https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json#/properties/publisher"
},
"role": {
"title": "Role of the issuing party",
"description": "Contains the role of the issuing party according to section 7 in the CSAF standard.",
"$ref": "https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json#/properties/role"
},
"url": {
"title": "URL of the metadata",
"description": "Contains the URL of the provider-metadata.json for that entry.",
"$ref": "https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json#/properties/canonical_url"
}
}
},
"mirrors_t": {
"title": "List of mirrors",
"description": "Contains a list of URLs or mirrors for this issuing party.",
"type": "array",
"minItems": 1,
"uniqueItems": true,
"items": {
"title": "Mirror",
"description": "Contains the base URL of the mirror for this issuing party.",
"$ref": "https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json#/$defs/provider_url_t"
}
}
},
"required": [
"aggregator",
"aggregator_version",
"canonical_url",
"csaf_providers",
"last_updated"
],
"properties": {
"aggregator": {
"title": "Aggregator",
"description": "Provides information about the aggregator.",
"type": "object",
"required": [
"category",
"name",
"namespace"
],
"properties": {
"category": {
"title": "Category of aggregator",
"description": "Provides information about the category of aggregator.",
"type": "string",
"enum": [
"aggregator",
"lister"
]
},
"contact_details": {
"title": "Contact details",
"description": "Information on how to contact the aggregator, possibly including details such as web sites, email addresses, phone numbers, and postal mail addresses.",
"type": "string",
"minLength": 1,
"examples": [
"Aggregator can be reached at contact_us@aggregator.example.com, or via our website at https://www.example.com/security/csaf/aggregator/contact."
]
},
"issuing_authority": {
"title": "Issuing authority",
"description": "Provides information about the authority of the aggregator to release the list, in particular, the party's constituency and responsibilities or other obligations.",
"type": "string",
"minLength": 1
},
"name": {
"title": "Name of aggregator",
"description": "Contains the name of the aggregator.",
"type": "string",
"minLength": 1,
"examples": [
"BSI",
"CISA",
"CSAF TC"
]
},
"namespace": {
"title": "Namespace of aggregator",
"description": "Contains a URL which is under control of the aggregator and can be used as a globally unique identifier for that aggregator.",
"type": "string",
"format": "uri",
"examples": [
"https://www.example.com",
"https://csaf.io"
]
}
}
},
"aggregator_version": {
"title": "CSAF aggregator version",
"description": "Gives the version of the CSAF aggregator specification which the document was generated for.",
"type": "string",
"enum": [
"2.0"
]
},
"canonical_url": {
"title": "Canonical URL",
"description": "Contains the URL for this document.",
"$ref": "#/$defs/aggregator_url_t"
},
"csaf_providers": {
"title": "List of CSAF providers",
"description": "Contains a list with information from CSAF providers.",
"type": "array",
"minItems": 1,
"uniqueItems": true,
"items": {
"title": "CSAF provider entry",
"description": "Contains information from a CSAF provider.",
"type": "object",
"required": [
"metadata"
],
"properties": {
"metadata": {
"title": "CSAF provider metadata.",
"description": "Contains the metadata of a single CSAF provider.",
"$ref": "#/$defs/metadata_t"
},
"mirrors": {
"title": "List of mirrors",
"description": "Contains a list of URLs or mirrors for this CSAF provider.",
"$ref": "#/$defs/mirrors_t"
}
}
}
},
"csaf_publishers": {
"title": "List of CSAF publishers",
"description": "Contains a list with information from CSAF publishers.",
"type": "array",
"minItems": 1,
"uniqueItems": true,
"items": {
"title": "CSAF publisher entry",
"description": "Contains information from a CSAF publisher.",
"type": "object",
"required": [
"metadata",
"mirror",
"update_interval"
],
"properties": {
"metadata": {
"title": "CSAF publisher metadata.",
"description": "Contains the metadata of a single CSAF publisher extracted from one of its CSAF documents.",
"$ref": "#/$defs/metadata_t"
},
"mirrors": {
"title": "List of mirrors",
"description": "Contains a list of URLs or mirrors for this CSAF publisher.",
"$ref": "#/$defs/mirrors_t"
},
"update_interval": {
"title": "Update interval",
"description": "Contains information about how often the CSAF publisher is checked for new CSAF documents.",
"type": "string",
"minLength": 1,
"examples": [
"daily",
"weekly",
"monthly",
"on best effort",
"on notification by CSAF publisher"
]
}
}
}
},
"last_updated": {
"title": "Last updated",
"description": "Holds the date and time when the document was last updated.",
"type": "string",
"format": "date-time"
}
}
}

View file

@ -22,6 +22,7 @@ const (
currentReleaseDateExpr = `$.document.tracking.current_release_date`
tlpLabelExpr = `$.document.distribution.tlp.label`
summaryExpr = `$.document.notes[? @.category=="summary" || @.type=="summary"].text`
statusExpr = `$.document.tracking.status`
)
// AdvisorySummary is a summary of some essentials of an CSAF advisory.
@ -33,6 +34,7 @@ type AdvisorySummary struct {
CurrentReleaseDate time.Time
Summary string
TLPLabel string
Status string
}
// NewAdvisorySummary creates a summary from an advisory doc
@ -54,6 +56,7 @@ func NewAdvisorySummary(
{Expr: summaryExpr, Action: util.StringMatcher(&e.Summary), Optional: true},
{Expr: tlpLabelExpr, Action: util.StringMatcher(&e.TLPLabel), Optional: true},
{Expr: publisherExpr, Action: util.ReMarshalMatcher(e.Publisher)},
{Expr: statusExpr, Action: util.StringMatcher(&e.Status)},
}, doc); err != nil {
return nil, err
}

38
csaf/util.go Normal file
View file

@ -0,0 +1,38 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package csaf
import (
"bufio"
"io"
"strings"
)
// ExtractProviderURL extracts URLs of provider metadata.
// If all is true all URLs are returned. Otherwise only the first is returned.
func ExtractProviderURL(r io.Reader, all bool) ([]string, error) {
const csaf = "CSAF:"
var urls []string
sc := bufio.NewScanner(r)
for sc.Scan() {
line := sc.Text()
if strings.HasPrefix(line, csaf) {
urls = append(urls, strings.TrimSpace(line[len(csaf):]))
if !all {
return urls, nil
}
}
}
if err := sc.Err(); err != nil {
return nil, err
}
return urls, nil
}

View file

@ -33,9 +33,13 @@ var cvss31 []byte
//go:embed schema/provider_json_schema.json
var providerSchema []byte
//go:embed schema/aggregator_json_schema.json
var aggregatorSchema []byte
var (
compiledCSAFSchema compiledSchema
compiledProviderSchema compiledSchema
compiledCSAFSchema compiledSchema
compiledProviderSchema compiledSchema
compiledAggregatorSchema compiledSchema
)
func init() {
@ -49,6 +53,11 @@ func init() {
{"https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json", providerSchema},
{"https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", csafSchema},
})
compiledAggregatorSchema.compiler([]schemaData{
{"https://docs.oasis-open.org/csaf/csaf/v2.0/aggregator_json_schema.json", aggregatorSchema},
{"https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json", providerSchema},
{"https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", csafSchema},
})
}
type schemaData struct {
@ -146,3 +155,9 @@ func ValidateCSAF(doc interface{}) ([]string, error) {
func ValidateProviderMetadata(doc interface{}) ([]string, error) {
return compiledProviderSchema.validate(doc)
}
// ValidateAggregator validates the document doc against the JSON schema
// of a CSAF aggregator (aggregator_json_schema.json together with the
// referenced provider and CSAF document schemas it was compiled with).
func ValidateAggregator(doc interface{}) ([]string, error) {
return compiledAggregatorSchema.validate(doc)
}

View file

@ -0,0 +1,33 @@
workers = 2
folder = "/var/csaf-aggregator"
web = "/var/csaf-aggregator/html"
domain = "https://localhost:9443"
rate = 10.0
insecure = true
[aggregator]
category = "aggregator"
name = "Example Development CSAF Aggregator"
contact_details = "some @ somewhere"
issuing_authority = "This service is provided as it is. It is gratis for everybody."
namespace = "Testnamespace"
[[providers]]
name = "local-dev-provider"
domain = "localhost"
# rate = 1.5
# insecure = true
[[providers]]
name = "local-dev-provider2"
domain = "localhost"
# rate = 1.2
# insecure = true
#key =
#passphrase =
# for testing; the specification requires at least two providers
# allow_single_provider = true

2
go.mod
View file

@ -7,10 +7,12 @@ require (
github.com/PaesslerAG/gval v1.1.2
github.com/PaesslerAG/jsonpath v0.1.1
github.com/ProtonMail/gopenpgp/v2 v2.3.0
github.com/gofrs/flock v0.8.1
github.com/jessevdk/go-flags v1.5.0
github.com/mitchellh/go-homedir v1.1.0
github.com/santhosh-tekuri/jsonschema/v5 v5.0.0
golang.org/x/crypto v0.0.0-20211202192323-5770296d904e
golang.org/x/time v0.0.0-20220210224613-90d013bbcef8
)
require (

5
go.sum
View file

@ -16,6 +16,8 @@ github.com/ProtonMail/gopenpgp/v2 v2.3.0/go.mod h1:F62x0m3akQuisX36pOgAtKOHZ1E7/
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
@ -70,12 +72,15 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 h1:vVKdlvoWBphwdxWKrFZEuM0kGgGLxUOYcY4U/2Vjg44=
golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View file

@ -13,10 +13,35 @@ import (
"math/rand"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
)
var (
twoOrMoreDots = regexp.MustCompile(`\.{2,}`)
stripSlashes = strings.NewReplacer(`/`, ``, `\`, ``)
)
// CleanFileName removes all "/" and "\" characters from the passed string
// and collapses each run of two or more consecutive "." into a single ".".
// Slashes are stripped first, so e.g. "a/.." becomes "a." (not "a..").
func CleanFileName(s string) string {
return twoOrMoreDots.ReplaceAllString(stripSlashes.Replace(s), `.`)
}
// PathExists returns true if path exists in the file system.
// A "does not exist" result from Stat is not reported as an error;
// any other Stat failure is returned to the caller.
func PathExists(path string) (bool, error) {
	switch _, err := os.Stat(path); {
	case err == nil:
		return true, nil
	case os.IsNotExist(err):
		return false, nil
	default:
		return false, err
	}
}
// NWriter is an io.Writer counting the bytes copied through it.
type NWriter struct {
io.Writer

66
util/hash.go Normal file
View file

@ -0,0 +1,66 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package util
import (
"bufio"
"encoding/hex"
"fmt"
"hash"
"io"
"os"
"regexp"
)
var hexRe = regexp.MustCompile(`^([[:xdigit:]]+)`)
// HashFromReader reads a base 16 coded hash sum from a reader.
func HashFromReader(r io.Reader) ([]byte, error) {
scanner := bufio.NewScanner(r)
for scanner.Scan() {
if m := hexRe.FindStringSubmatch(scanner.Text()); m != nil {
return hex.DecodeString(m[1])
}
}
return nil, scanner.Err()
}
// HashFromFile reads a base 16 coded hash sum from a file.
func HashFromFile(fname string) ([]byte, error) {
f, err := os.Open(fname)
if err != nil {
return nil, err
}
defer f.Close()
return HashFromReader(f)
}
// WriteHashToFile writes a hash of data to file fname.
func WriteHashToFile(fname, name string, h hash.Hash, data []byte) error {
if _, err := h.Write(data); err != nil {
return err
}
f, err := os.Create(fname)
if err != nil {
return err
}
fmt.Fprintf(f, "%x %s\n", h.Sum(nil), name)
return f.Close()
}
// WriteHashSumToFile writes a hash sum to file fname.
func WriteHashSumToFile(fname, name string, sum []byte) error {
f, err := os.Create(fname)
if err != nil {
return err
}
fmt.Fprintf(f, "%x %s\n", sum, name)
return f.Close()
}

View file

@ -12,6 +12,7 @@ import (
"context"
"encoding/json"
"errors"
"fmt"
"time"
"github.com/PaesslerAG/gval"
@ -76,6 +77,18 @@ func ReMarshalMatcher(dst interface{}) func(interface{}) error {
}
}
// BoolMatcher stores the matched result in a bool.
// The returned closure writes the value into dst when it is a bool
// and fails otherwise.
func BoolMatcher(dst *bool) func(interface{}) error {
	return func(v interface{}) error {
		switch b := v.(type) {
		case bool:
			*dst = b
			return nil
		default:
			return errors.New("not a bool")
		}
	}
}
// StringMatcher stores the matched result in a string.
func StringMatcher(dst *string) func(interface{}) error {
return func(x interface{}) error {
@ -111,14 +124,17 @@ func (pe *PathEval) Extract(
optional bool,
doc interface{},
) error {
x, err := pe.Eval(expr, doc)
if err != nil {
if optional {
optErr := func(err error) error {
if err == nil || optional {
return nil
}
return err
return fmt.Errorf("extract failed '%s': %v", expr, err)
}
return action(x)
x, err := pe.Eval(expr, doc)
if err != nil {
return optErr(err)
}
return optErr(action(x))
}
// Match matches a list of PathEvalMatcher pairs against a document.

View file

@ -3,33 +3,18 @@
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
package main
package util
import (
"bufio"
"encoding/hex"
"io"
"net/url"
"regexp"
"strings"
)
var hexRe = regexp.MustCompile(`^([[:xdigit:]]+)`)
func hashFromReader(r io.Reader) ([]byte, error) {
scanner := bufio.NewScanner(r)
for scanner.Scan() {
if m := hexRe.FindStringSubmatch(scanner.Text()); m != nil {
return hex.DecodeString(m[1])
}
}
return nil, scanner.Err()
}
func basePath(p string) (string, error) {
// BaseURL returns the base URL for a given URL p.
func BaseURL(p string) (string, error) {
u, err := url.Parse(p)
if err != nil {
return "", err