mirror of https://github.com/gocsaf/csaf.git (synced 2025-12-22 18:15:42 +01:00)
Downloader: Add structured logging, storing of failed advisories, and statistics
* Add forwarding support in downloader.
* Raise needed Go version to 1.21+ so slog can be used.
* Introduce validation mode flag (strict, unsafe).
* Add structured logging and place the log into the download folder.
* Improve some code comments (bernhardreiter).
* Add counting stats to downloader.
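As a rough, minimal sketch of what the structured-logging change enables (not the downloader's exact code; the directory, file name, and level below are assumed example values), Go 1.21's log/slog can write JSON records into a log file placed inside the download folder:

```go
package main

import (
    "log/slog"
    "os"
    "path/filepath"
)

func main() {
    // Assumed example values; the real downloader takes these from its config.
    dir, logFile := "downloads", "downloader.log"

    if err := os.MkdirAll(dir, 0755); err != nil {
        panic(err)
    }
    f, err := os.OpenFile(filepath.Join(dir, logFile),
        os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644)
    if err != nil {
        panic(err)
    }
    defer f.Close()

    // JSON handler with a configurable level, similar to the new prepareLogging.
    slog.SetDefault(slog.New(slog.NewJSONHandler(f, &slog.HandlerOptions{
        Level: slog.LevelInfo,
    })))

    slog.Info("Written advisory", "path", "white/example.json")
}
```

The prepareLogging function added in this commit does essentially this, driven by the new logfile and loglevel options.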
This commit is contained in:
parent e0475791ff
commit 5459f10d39
5 changed files with 429 additions and 143 deletions
@@ -11,7 +11,11 @@ package main

import (
"crypto/tls"
"fmt"
"io"
"log/slog"
"net/http"
"os"
"path/filepath"

"github.com/csaf-poc/csaf_distribution/v2/internal/certs"
"github.com/csaf-poc/csaf_distribution/v2/internal/filter"

@@ -24,6 +28,8 @@ const (
defaultPreset = "mandatory"
defaultForwardQueue = 5
defaultValidationMode = validationStrict
defaultLogFile = "downloader.log"
defaultLogLevel = logLevelInfo
)

type validationMode string

@@ -33,8 +39,17 @@ const (
validationUnsafe = validationMode("unsafe")
)

type logLevel string

const (
logLevelDebug = logLevel("debug")
logLevelInfo = logLevel("info")
logLevelWarn = logLevel("warn")
logLevelError = logLevel("error")
)

type config struct {
Directory *string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"`
Directory string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"`
Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"`
IgnoreSignatureCheck bool `long:"ignoresigcheck" description:"Ignore signature check results, just warn on mismatch" toml:"ignoresigcheck"`
ClientCert *string `long:"client-cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"`

@@ -62,6 +77,10 @@ type config struct {
ForwardQueue int `long:"forwardqueue" description:"Maximal queue LENGTH before forwarder" value-name:"LENGTH" toml:"forward_queue"`
ForwardInsecure bool `long:"forwardinsecure" description:"Do not check TLS certificates from forward endpoint" toml:"forward_insecure"`

LogFile string `long:"logfile" description:"FILE to log download to" value-name:"FILE" toml:"log_file"`
//lint:ignore SA5008 We are using choice more than once: debug, info, warn, error
LogLevel logLevel `long:"loglevel" description:"LEVEL of logging details" value-name:"LEVEL" choice:"debug" choice:"info" choice:"warn" choice:"error" toml:"log_level"`

Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"`

clientCerts []tls.Certificate

@@ -87,6 +106,8 @@ func parseArgsConfig() ([]string, *config, error) {
cfg.RemoteValidatorPresets = []string{defaultPreset}
cfg.ValidationMode = defaultValidationMode
cfg.ForwardQueue = defaultForwardQueue
cfg.LogFile = defaultLogFile
cfg.LogLevel = defaultLogLevel
},
// Re-establish default values if not set.
EnsureDefaults: func(cfg *config) {

@@ -117,11 +138,94 @@ func (vm *validationMode) UnmarshalText(text []byte) error {
return nil
}

// UnmarshalText implements [encoding.TextUnmarshaler].
func (ll *logLevel) UnmarshalText(text []byte) error {
switch l := logLevel(text); l {
case logLevelDebug, logLevelInfo, logLevelWarn, logLevelError:
*ll = l
default:
return fmt.Errorf(`invalid value %q (expected "debug", "info", "warn", "error")`, l)
}
return nil
}

// ignoreURL returns true if the given URL should not be downloaded.
func (cfg *config) ignoreURL(u string) bool {
return cfg.ignorePattern.Matches(u)
}

// slogLevel converts logLevel to [slog.Level].
func (ll logLevel) slogLevel() slog.Level {
switch ll {
case logLevelDebug:
return slog.LevelDebug
case logLevelInfo:
return slog.LevelInfo
case logLevelWarn:
return slog.LevelWarn
case logLevelError:
return slog.LevelError
default:
return slog.LevelInfo
}
}

// prepareDirectory ensures that the working directory
// exists and is set up properly.
func (cfg *config) prepareDirectory() error {
// If no directory is given, use the current working directory.
if cfg.Directory == "" {
dir, err := os.Getwd()
if err != nil {
return err
}
cfg.Directory = dir
return nil
}
// Use the given directory.
if _, err := os.Stat(cfg.Directory); err != nil {
// If it does not exist, create it.
if os.IsNotExist(err) {
if err = os.MkdirAll(cfg.Directory, 0755); err != nil {
return err
}
} else {
return err
}
}
return nil
}

// prepareLogging sets up the structured logging.
func (cfg *config) prepareLogging() error {
var w io.Writer
if cfg.LogFile == "" {
w = os.Stderr
} else {
var fname string
// We put the log inside the download folder
// if it is not absolute.
if filepath.IsAbs(cfg.LogFile) {
fname = cfg.LogFile
} else {
fname = filepath.Join(cfg.Directory, cfg.LogFile)
}
f, err := os.OpenFile(fname, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644)
if err != nil {
return err
}
w = f
}
ho := slog.HandlerOptions{
//AddSource: true,
Level: cfg.LogLevel.slogLevel(),
}
handler := slog.NewJSONHandler(w, &ho)
logger := slog.New(handler)
slog.SetDefault(logger)
return nil
}

// compileIgnorePatterns compiles the configured patterns to be ignored.
func (cfg *config) compileIgnorePatterns() error {
pm, err := filter.NewPatternMatcher(cfg.IgnorePattern)

@@ -145,8 +249,15 @@ func (cfg *config) prepareCertificates() error {

// prepare prepares internal state of a loaded configuration.
func (cfg *config) prepare() error {
if err := cfg.prepareCertificates(); err != nil {
for _, prepare := range []func(*config) error{
(*config).prepareDirectory,
(*config).prepareLogging,
(*config).prepareCertificates,
(*config).compileIgnorePatterns,
} {
if err := prepare(cfg); err != nil {
return err
}
return cfg.compileIgnorePatterns()
}
return nil
}
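The rewritten prepare() above chains its steps through Go method expressions ((*config).prepareDirectory and friends), so the first failing step aborts the whole preparation. A self-contained sketch of that pattern, with illustrative type and step names rather than the downloader's own:

```go
package main

import "fmt"

type config struct{ dir string }

// Two illustrative steps with the same shape as the real prepare* methods.
func (c *config) stepOne() error { c.dir = "."; return nil }
func (c *config) stepTwo() error { return nil }

func (c *config) prepare() error {
    // Method expressions turn methods into plain funcs taking the receiver,
    // so the steps can be iterated like any other slice of functions.
    for _, step := range []func(*config) error{
        (*config).stepOne,
        (*config).stepTwo,
    } {
        if err := step(c); err != nil {
            return err
        }
    }
    return nil
}

func main() {
    c := &config{}
    if err := c.prepare(); err != nil {
        fmt.Println("prepare failed:", err)
    }
}
```

Compared with calling each method in sequence, adding a step is a one-line change and the error handling is written exactly once.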
@@ -3,8 +3,8 @@
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
// SPDX-FileCopyrightText: 2022, 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022, 2023 Intevation GmbH <https://intevation.de>

package main

@@ -19,7 +19,7 @@ import (
"fmt"
"hash"
"io"
"log"
"log/slog"
"net/http"
"net/url"
"os"

@@ -31,21 +31,28 @@ import (
"time"

"github.com/ProtonMail/gopenpgp/v2/crypto"
"golang.org/x/time/rate"

"github.com/csaf-poc/csaf_distribution/v2/csaf"
"github.com/csaf-poc/csaf_distribution/v2/util"
"golang.org/x/time/rate"
)

type downloader struct {
cfg *config
directory string
keys *crypto.KeyRing
eval *util.PathEval
validator csaf.RemoteValidator
forwarder *forwarder
mkdirMu sync.Mutex
statsMu sync.Mutex
stats stats
}

// failedValidationDir is the name of the sub folder
// where advisories are stored that fail validation in
// unsafe mode.
const failedValidationDir = "failed_validation"

func newDownloader(cfg *config) (*downloader, error) {

var validator csaf.RemoteValidator

@@ -78,6 +85,13 @@ func (d *downloader) close() {
}
}

// addStats adds the given stats to the total stats.
func (d *downloader) addStats(o *stats) {
d.statsMu.Lock()
defer d.statsMu.Unlock()
d.stats.add(o)
}

func (d *downloader) httpClient() util.Client {

hClient := http.Client{}

@@ -130,8 +144,9 @@ func (d *downloader) download(ctx context.Context, domain string) error {

if d.cfg.Verbose {
for i := range lpmd.Messages {
log.Printf("Loading provider-metadata.json for %q: %s\n",
domain, lpmd.Messages[i].Message)
slog.Info("Loading provider-metadata.json",
"domain", domain,
"message", lpmd.Messages[i].Message)
}
}

@@ -247,7 +262,9 @@ func (d *downloader) loadOpenPGPKeys(
}
up, err := url.Parse(*key.URL)
if err != nil {
log.Printf("Invalid URL '%s': %v", *key.URL, err)
slog.Warn("Invalid URL",
"url", *key.URL,
"error", err)
continue
}

@@ -255,12 +272,18 @@ func (d *downloader) loadOpenPGPKeys(

res, err := client.Get(u)
if err != nil {
log.Printf("Fetching public OpenPGP key %s failed: %v.", u, err)
slog.Warn(
"Fetching public OpenPGP key failed",
"url", u,
"error", err)
continue
}
if res.StatusCode != http.StatusOK {
log.Printf("Fetching public OpenPGP key %s status code: %d (%s)",
u, res.StatusCode, res.Status)
slog.Warn(
"Fetching public OpenPGP key failed",
"url", u,
"status_code", res.StatusCode,
"status", res.Status)
continue
}

@@ -270,18 +293,25 @@ func (d *downloader) loadOpenPGPKeys(
}()

if err != nil {
log.Printf("Reading public OpenPGP key %s failed: %v", u, err)
slog.Warn(
"Reading public OpenPGP key failed",
"url", u,
"error", err)
continue
}

if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) {
log.Printf(
"Fingerprint of public OpenPGP key %s does not match remotely loaded.", u)
slog.Warn(
"Fingerprint of public OpenPGP key does not match remotely loaded",
"url", u)
continue
}
if d.keys == nil {
if keyring, err := crypto.NewKeyRing(ckey); err != nil {
log.Printf("Creating store for public OpenPGP key %s failed: %v.", u, err)
slog.Warn(
"Creating store for public OpenPGP key failed",
"url", u,
"error", err)
} else {
d.keys = keyring
}

@@ -295,16 +325,20 @@ func (d *downloader) loadOpenPGPKeys(
// logValidationIssues logs the issues reported by the advisory schema validation.
func (d *downloader) logValidationIssues(url string, errors []string, err error) {
if err != nil {
log.Printf("Failed to validate %s: %v", url, err)
slog.Error("Failed to validate",
"url", url,
"error", err)
return
}
if len(errors) > 0 {
if d.cfg.Verbose {
log.Printf("CSAF file %s has validation errors: %s\n",
url, strings.Join(errors, ", "))
slog.Error("CSAF file has validation errors",
"url", url,
"error", strings.Join(errors, ", "))
} else {
log.Printf("CSAF file %s has %d validation errors.\n",
url, len(errors))
slog.Error("CSAF file has validation errors",
"url", url,
"count", len(errors))
}
}
}

@@ -325,8 +359,12 @@ func (d *downloader) downloadWorker(
initialReleaseDate time.Time
dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339)
lower = strings.ToLower(string(label))
stats = stats{}
)

// Add collected stats back to total.
defer d.addStats(&stats)

nextAdvisory:
for {
var file csaf.AdvisoryFile

@@ -342,41 +380,52 @@ nextAdvisory:

u, err := url.Parse(file.URL())
if err != nil {
log.Printf("Ignoring invalid URL: %s: %v\n", file.URL(), err)
stats.downloadFailed++
slog.Warn("Ignoring invalid URL",
"url", file.URL(),
"error", err)
continue
}

if d.cfg.ignoreURL(file.URL()) {
if d.cfg.Verbose {
slog.Warn("Ignoring URL", "url", file.URL())
}
continue
}

// Ignore not conforming filenames.
filename := filepath.Base(u.Path)
if !util.ConformingFileName(filename) {
log.Printf("Not conforming filename %q. Ignoring.\n", filename)
continue
}

if d.cfg.ignoreURL(file.URL()) {
if d.cfg.Verbose {
log.Printf("Ignoring %q.\n", file.URL())
}
stats.filenameFailed++
slog.Warn("Ignoring non-conforming filename",
"filename", filename)
continue
}

resp, err := client.Get(file.URL())
if err != nil {
log.Printf("WARN: cannot get '%s': %v\n", file.URL(), err)
stats.downloadFailed++
slog.Warn("Cannot GET",
"url", file.URL(),
"error", err)
continue
}

if resp.StatusCode != http.StatusOK {
log.Printf("WARN: cannot load %s: %s (%d)\n",
file.URL(), resp.Status, resp.StatusCode)
stats.downloadFailed++
slog.Warn("Cannot load",
"url", file.URL(),
"status", resp.Status,
"status_code", resp.StatusCode)
continue
}

// Warn if we do not get JSON.
if ct := resp.Header.Get("Content-Type"); ct != "application/json" {
log.Printf(
"WARN: The content type of %s should be 'application/json' but is '%s'\n",
file.URL(), ct)
slog.Warn("Content type is not 'application/json'",
"url", file.URL(),
"content_type", ct)
}

var (

@@ -390,7 +439,9 @@ nextAdvisory:
// Only hash when we have a remote counter part we can compare it with.
if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil {
if d.cfg.Verbose {
log.Printf("WARN: cannot fetch %s: %v\n", file.SHA256URL(), err)
slog.Warn("Cannot fetch SHA256",
"url", file.SHA256URL(),
"error", err)
}
} else {
s256 = sha256.New()

@@ -399,7 +450,9 @@ nextAdvisory:

if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil {
if d.cfg.Verbose {
log.Printf("WARN: cannot fetch %s: %v\n", file.SHA512URL(), err)
slog.Warn("Cannot fetch SHA512",
"url", file.SHA512URL(),
"error", err)
}
} else {
s512 = sha512.New()

@@ -420,13 +473,17 @@ nextAdvisory:
tee := io.TeeReader(resp.Body, hasher)
return json.NewDecoder(tee).Decode(&doc)
}(); err != nil {
log.Printf("Downloading %s failed: %v", file.URL(), err)
stats.downloadFailed++
slog.Warn("Downloading failed",
"url", file.URL(),
"error", err)
continue
}

// Compare the checksums.
s256Check := func() error {
if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) {
stats.sha256Failed++
return fmt.Errorf("SHA256 checksum of %s does not match", file.URL())
}
return nil
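The download path above hashes the advisory while decoding it: io.TeeReader mirrors every byte the JSON decoder reads into the hash writers, so the response body is consumed only once. A minimal standalone sketch of the technique, using a string literal instead of an HTTP body:

```go
package main

import (
    "crypto/sha256"
    "encoding/json"
    "fmt"
    "io"
    "strings"
)

func main() {
    body := strings.NewReader(`{"document":{"title":"example"}}`)

    h := sha256.New()
    // Everything the decoder reads from tee is also written to the hash.
    tee := io.TeeReader(body, h)

    var doc any
    if err := json.NewDecoder(tee).Decode(&doc); err != nil {
        panic(err)
    }
    fmt.Printf("sha256: %x\n", h.Sum(nil))
}
```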
@@ -434,12 +491,13 @@ nextAdvisory:

s512Check := func() error {
if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) {
stats.sha512Failed++
return fmt.Errorf("SHA512 checksum of %s does not match", file.URL())
}
return nil
}

// Validate OpenPGG signature.
// Validate OpenPGP signature.
keysCheck := func() error {
// Only check signature if we have loaded keys.
if d.keys == nil {

@@ -449,13 +507,15 @@ nextAdvisory:
sign, signData, err = loadSignature(client, file.SignURL())
if err != nil {
if d.cfg.Verbose {
log.Printf("downloading signature '%s' failed: %v\n",
file.SignURL(), err)
slog.Warn("Downloading signature failed",
"url", file.SignURL(),
"error", err)
}
}
if sign != nil {
if err := d.checkSignature(data.Bytes(), sign); err != nil {
if !d.cfg.IgnoreSignatureCheck {
stats.signatureFailed++
return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err)
}
}

@@ -466,6 +526,7 @@ nextAdvisory:
// Validate against CSAF schema.
schemaCheck := func() error {
if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 {
stats.schemaFailed++
d.logValidationIssues(file.URL(), errors, err)
return fmt.Errorf("schema validation for %q failed", file.URL())
}

@@ -475,6 +536,7 @@ nextAdvisory:
// Validate if filename is conforming.
filenameCheck := func() error {
if err := util.IDMatchesFilename(d.eval, doc, filename); err != nil {
stats.filenameFailed++
return fmt.Errorf("filename not conforming %s: %s", file.URL(), err)
}
return nil

@@ -493,6 +555,7 @@ nextAdvisory:
return nil
}
if !rvr.Valid {
stats.remoteFailed++
return fmt.Errorf("remote validation of %q failed", file.URL())
}
return nil

@@ -509,8 +572,7 @@ nextAdvisory:
remoteValidatorCheck,
} {
if err := check(); err != nil {
// TODO: Improve logging.
log.Printf("check failed: %v\n", err)
slog.Error("Validation check failed", "error", err)
valStatus.update(invalidValidationStatus)
if d.cfg.ValidationMode == validationStrict {
continue nextAdvisory

@@ -530,17 +592,26 @@ nextAdvisory:

if d.cfg.NoStore {
// Do not write locally.
if valStatus == validValidationStatus {
stats.succeeded++
}
continue
}

if err := d.eval.Extract(`$.document.tracking.initial_release_date`, dateExtract, false, doc); err != nil {
log.Printf("Cannot extract initial_release_date from advisory '%s'\n", file.URL())
slog.Warn("Cannot extract initial_release_date from advisory",
"url", file.URL())
initialReleaseDate = time.Now()
}
initialReleaseDate = initialReleaseDate.UTC()

// Write advisory to file
newDir := path.Join(d.directory, lower)
// Advisories that failed validation are stored in a special folder.
var newDir string
if valStatus != validValidationStatus {
newDir = path.Join(d.cfg.Directory, failedValidationDir, lower)
} else {
newDir = path.Join(d.cfg.Directory, lower)
}

// Do we have a configured destination folder?
if d.cfg.Folder != "" {

@@ -557,6 +628,7 @@ nextAdvisory:
lastDir = newDir
}

// Write advisory to file
path := filepath.Join(lastDir, filename)

// Write data to disk.

@@ -577,7 +649,8 @@ nextAdvisory:
}
}

log.Printf("Written advisory '%s'.\n", path)
stats.succeeded++
slog.Info("Written advisory", "path", path)
}
}

@@ -633,40 +706,9 @@ func loadHash(client util.Client, p string) ([]byte, []byte, error) {
return hash, data.Bytes(), nil
}

// prepareDirectory ensures that the working directory
// exists and is setup properly.
func (d *downloader) prepareDirectory() error {
// If no special given use current working directory.
if d.cfg.Directory == nil {
dir, err := os.Getwd()
if err != nil {
return err
}
d.directory = dir
return nil
}
// Use given directory
if _, err := os.Stat(*d.cfg.Directory); err != nil {
// If it does not exist create it.
if os.IsNotExist(err) {
if err = os.MkdirAll(*d.cfg.Directory, 0755); err != nil {
return err
}
} else {
return err
}
}
d.directory = *d.cfg.Directory
return nil
}

// run performs the downloads for all the given domains.
func (d *downloader) run(ctx context.Context, domains []string) error {

if err := d.prepareDirectory(); err != nil {
return err
}

defer d.stats.log()
for _, domain := range domains {
if err := d.download(ctx, domain); err != nil {
return err
@@ -12,9 +12,10 @@ import (
"bytes"
"crypto/tls"
"io"
"log"
"log/slog"
"mime/multipart"
"net/http"
"os"
"path/filepath"
"strings"

@@ -22,6 +23,10 @@ import (
"github.com/csaf-poc/csaf_distribution/v2/util"
)

// failedForwardDir is the name of the special sub folder
// where advisories get stored whose forwarding failed.
const failedForwardDir = "failed_forward"

// validationStatus represents the validation status
// known to the HTTP endpoint.
type validationStatus string

@@ -45,6 +50,9 @@ type forwarder struct {
cfg *config
cmds chan func(*forwarder)
client util.Client

failed int
succeeded int
}

// newForwarder creates a new forwarder.

@@ -58,7 +66,7 @@ func newForwarder(cfg *config) *forwarder {

// run runs the forwarder. Meant to be used in a Go routine.
func (f *forwarder) run() {
defer log.Println("debug: forwarder done")
defer slog.Debug("forwarder done")

for cmd := range f.cmds {
cmd(f)

@@ -70,6 +78,15 @@ func (f *forwarder) close() {
close(f.cmds)
}

// log logs the current statistics.
func (f *forwarder) log() {
f.cmds <- func(f *forwarder) {
slog.Info("Forward statistics",
"succeeded", f.succeeded,
"failed", f.failed)
}
}

// httpClient returns a cached HTTP client used for uploading
// the advisories to the configured HTTP endpoint.
func (f *forwarder) httpClient() util.Client {
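The forwarder's new counters (succeeded, failed) are only touched inside functions sent over its cmds channel; run() executes them one at a time, so no mutex is needed, and log() above reads the totals through the same mechanism. A minimal sketch of this command-channel pattern, with illustrative names:

```go
package main

import "fmt"

type worker struct {
    cmds      chan func(*worker)
    succeeded int
}

func newWorker() *worker {
    w := &worker{cmds: make(chan func(*worker), 5)}
    go w.run()
    return w
}

// run executes the queued commands one by one; only this goroutine
// ever touches the worker's fields.
func (w *worker) run() {
    for cmd := range w.cmds {
        cmd(w)
    }
}

func (w *worker) record() {
    w.cmds <- func(w *worker) { w.succeeded++ }
}

func (w *worker) report(done chan<- int) {
    w.cmds <- func(w *worker) { done <- w.succeeded }
}

func main() {
    w := newWorker()
    w.record()
    w.record()

    done := make(chan int)
    w.report(done)
    fmt.Println("succeeded:", <-done)
    close(w.cmds)
}
```

Closing the channel corresponds to the forwarder's close(), which lets run() return once the queue is drained.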
@@ -113,15 +130,12 @@ func replaceExt(fname, nExt string) string {
return fname[:len(fname)-len(ext)] + nExt
}

// forward sends a given document with filename, status and
// checksums to the forwarder. This is async to the degree
// till the configured queue size is filled.
func (f *forwarder) forward(
// buildRequest creates an HTTP request suited to forward the given advisory.
func (f *forwarder) buildRequest(
filename, doc string,
status validationStatus,
sha256, sha512 string,
) {
buildRequest := func() (*http.Request, error) {
) (*http.Request, error) {
body := new(bytes.Buffer)
writer := multipart.NewWriter(body)

@@ -165,24 +179,75 @@ func (f *forwarder) forward(
contentType := writer.FormDataContentType()
req.Header.Set("Content-Type", contentType)
return req, nil
}
}

// storeFailedAdvisory stores an advisory in a special folder
// in case the forwarding failed.
func (f *forwarder) storeFailedAdvisory(filename, doc, sha256, sha512 string) error {
dir := filepath.Join(f.cfg.Directory, failedForwardDir)
// Create special folder if it does not exist.
if _, err := os.Stat(dir); err != nil {
if os.IsNotExist(err) {
if err := os.MkdirAll(dir, 0755); err != nil {
return err
}
} else {
return err
}
}
// Store parts which are not empty.
for _, x := range []struct {
p string
d string
}{
{filename, doc},
{filename + ".sha256", sha256},
{filename + ".sha512", sha512},
} {
if len(x.d) != 0 {
path := filepath.Join(dir, x.p)
if err := os.WriteFile(path, []byte(x.d), 0644); err != nil {
return err
}
}
}
return nil
}

// storeFailed is a logging wrapper around storeFailedAdvisory.
func (f *forwarder) storeFailed(filename, doc, sha256, sha512 string) {
f.failed++
if err := f.storeFailedAdvisory(filename, doc, sha256, sha512); err != nil {
slog.Error("Storing failed advisory failed",
"error", err)
}
}

// forward sends a given document with filename, status and
// checksums to the forwarder. This is asynchronous up to the
// configured queue size.
func (f *forwarder) forward(
filename, doc string,
status validationStatus,
sha256, sha512 string,
) {
// Run this in the main loop of the forwarder.
f.cmds <- func(f *forwarder) {
req, err := buildRequest()
req, err := f.buildRequest(filename, doc, status, sha256, sha512)
if err != nil {
// TODO: improve logging
log.Printf("error: %v\n", err)
slog.Error("building forward request failed",
"error", err)
f.storeFailed(filename, doc, sha256, sha512)
return
}
res, err := f.httpClient().Do(req)
if err != nil {
// TODO: improve logging
log.Printf("error: %v\n", err)
slog.Error("sending forward request failed",
"error", err)
f.storeFailed(filename, doc, sha256, sha512)
return
}
if res.StatusCode != http.StatusCreated {
// TODO: improve logging
defer res.Body.Close()
var msg strings.Builder
io.Copy(&msg, io.LimitReader(res.Body, 512))

@@ -190,10 +255,16 @@ func (f *forwarder) forward(
if msg.Len() >= 512 {
dots = "..."
}
log.Printf("error: %s: %q (%d)\n",
filename, msg.String()+dots, res.StatusCode)
slog.Error("forwarding failed",
"filename", filename,
"body", msg.String()+dots,
"status_code", res.StatusCode)
f.storeFailed(filename, doc, sha256, sha512)
} else {
log.Printf("info: forwarding %q succeeded\n", filename)
f.succeeded++
slog.Debug(
"forwarding succeeded",
"filename", filename)
}
}
}
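Only the tail of buildRequest is visible in the hunks above (setting the request's Content-Type from the multipart writer). As a hedged sketch of how such a multipart/form-data upload request is typically assembled with mime/multipart; the URL and field names here are made up for illustration and are not necessarily the forwarder's actual ones:

```go
package main

import (
    "bytes"
    "fmt"
    "mime/multipart"
    "net/http"
)

// buildUploadRequest builds a multipart POST with one file part and one
// plain field. Field names and URL are illustrative only.
func buildUploadRequest(filename, doc, status string) (*http.Request, error) {
    body := new(bytes.Buffer)
    writer := multipart.NewWriter(body)

    part, err := writer.CreateFormFile("advisory", filename)
    if err != nil {
        return nil, err
    }
    if _, err := part.Write([]byte(doc)); err != nil {
        return nil, err
    }
    if err := writer.WriteField("validation_status", status); err != nil {
        return nil, err
    }
    // Close finalizes the multipart boundary before the body is sent.
    if err := writer.Close(); err != nil {
        return nil, err
    }

    req, err := http.NewRequest(http.MethodPost, "https://example.invalid/api/v1/import", body)
    if err != nil {
        return nil, err
    }
    req.Header.Set("Content-Type", writer.FormDataContentType())
    return req, nil
}

func main() {
    req, err := buildUploadRequest("example.json", `{"document":{}}`, "valid")
    if err != nil {
        panic(err)
    }
    fmt.Println(req.Method, req.URL, req.Header.Get("Content-Type"))
}
```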
@@ -11,7 +11,7 @@ package main

import (
"context"
"log"
"log/slog"
"os"
"os/signal"

@@ -33,7 +33,10 @@ func run(cfg *config, domains []string) error {
if cfg.ForwardURL != "" {
f := newForwarder(cfg)
go f.run()
defer f.close()
defer func() {
f.log()
f.close()
}()
d.forwarder = f
}

@@ -47,7 +50,7 @@ func main() {
options.ErrorCheck(cfg.prepare())

if len(domains) == 0 {
log.Println("No domains given.")
slog.Info("No domains given.")
return
}
cmd/csaf_downloader/stats.go (new file, 59 lines)
@@ -0,0 +1,59 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>

package main

import "log/slog"

// stats contains counters of the downloads.
type stats struct {
downloadFailed int
filenameFailed int
schemaFailed int
remoteFailed int
sha256Failed int
sha512Failed int
signatureFailed int
succeeded int
}

// add adds other stats to this.
func (st *stats) add(o *stats) {
st.downloadFailed += o.downloadFailed
st.filenameFailed += o.filenameFailed
st.schemaFailed += o.schemaFailed
st.remoteFailed += o.remoteFailed
st.sha256Failed += o.sha256Failed
st.sha512Failed += o.sha512Failed
st.signatureFailed += o.signatureFailed
st.succeeded += o.succeeded
}

func (st *stats) totalFailed() int {
return st.downloadFailed +
st.filenameFailed +
st.schemaFailed +
st.remoteFailed +
st.sha256Failed +
st.sha512Failed +
st.signatureFailed
}

// log logs the collected stats.
func (st *stats) log() {
slog.Info("Download statistics",
"succeeded", st.succeeded,
"total_failed", st.totalFailed(),
"filename_failed", st.filenameFailed,
"download_failed", st.downloadFailed,
"schema_failed", st.schemaFailed,
"remote_failed", st.remoteFailed,
"sha256_failed", st.sha256Failed,
"sha512_failed", st.sha512Failed,
"signature_failed", st.signatureFailed)
}
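The new stats type ties the other changes together: each downloadWorker keeps a local stats value and defers d.addStats(&stats), so the totals are merged under statsMu when the worker exits, and run() defers d.stats.log() to emit the summary at the end. A compressed sketch of that flow, with simplified counters and none of the downloader's other machinery:

```go
package main

import (
    "fmt"
    "sync"
)

type tally struct{ succeeded, failed int }

func (t *tally) add(o *tally) { t.succeeded += o.succeeded; t.failed += o.failed }

type runner struct {
    mu    sync.Mutex
    total tally
}

func (r *runner) addStats(o *tally) {
    r.mu.Lock()
    defer r.mu.Unlock()
    r.total.add(o)
}

// worker mirrors downloadWorker: local counters, merged once on exit.
func (r *runner) worker(ok bool) {
    local := tally{}
    defer r.addStats(&local)
    if ok {
        local.succeeded++
    } else {
        local.failed++
    }
}

func main() {
    r := &runner{}
    var wg sync.WaitGroup
    for _, ok := range []bool{true, true, false} {
        wg.Add(1)
        go func(ok bool) { defer wg.Done(); r.worker(ok) }(ok)
    }
    wg.Wait()
    fmt.Println("succeeded:", r.total.succeeded, "failed:", r.total.failed)
}
```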