From a1ea10baf9bb2325781383aa34c9f16ae1abbfd3 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Fri, 1 Sep 2023 20:14:18 +0200 Subject: [PATCH 001/235] feat: added model for a CSAF advisory --- csaf/advisory.go | 1038 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1038 insertions(+) create mode 100644 csaf/advisory.go diff --git a/csaf/advisory.go b/csaf/advisory.go new file mode 100644 index 0000000..dc73902 --- /dev/null +++ b/csaf/advisory.go @@ -0,0 +1,1038 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package csaf + +import ( + "encoding/json" + "io" + "os" +) + +// Acknowledgement reflects the 'acknowledgement' object in the list of acknowledgements. +// It must at least have one property. +type Acknowledgement struct { + Names []*string `json:"names,omitempty"` + Organization *string `json:"organization,omitempty"` + Summary *string `json:"summary,omitempty"` + URLs []*string `json:"urls,omitempty"` +} + +// BranchCategory is the category of a branch. +type BranchCategory string + +const ( + // CSAFBranchCategoryArchitecture is the "architecture" category. + CSAFBranchCategoryArchitecture BranchCategory = "architecture" + // CSAFBranchCategoryHostName is the "host_name" category. + CSAFBranchCategoryHostName BranchCategory = "host_name" + // CSAFBranchCategoryLanguage is the "language" category. + CSAFBranchCategoryLanguage BranchCategory = "language" + // CSAFBranchCategoryLegacy is the "legacy" category. + CSAFBranchCategoryLegacy BranchCategory = "legacy" + // CSAFBranchCategoryPatchLevel is the "patch_level" category. + CSAFBranchCategoryPatchLevel BranchCategory = "patch_level" + // CSAFBranchCategoryProductFamily is the "product_family" category. + CSAFBranchCategoryProductFamily BranchCategory = "product_family" + // CSAFBranchCategoryProductName is the "product_name" category. + CSAFBranchCategoryProductName BranchCategory = "product_name" + // CSAFBranchCategoryProductVersion is the "product_version" category. + CSAFBranchCategoryProductVersion BranchCategory = "product_version" + // CSAFBranchCategoryProductVersionRange is the "product_version_range" category. + CSAFBranchCategoryProductVersionRange BranchCategory = "product_version_range" + // CSAFBranchCategoryServicePack is the "service_pack" category. + CSAFBranchCategoryServicePack BranchCategory = "service_pack" + // CSAFBranchCategorySpecification is the "specification" category. + CSAFBranchCategorySpecification BranchCategory = "specification" + // CSAFBranchCategoryVendor is the "vendor" category. + CSAFBranchCategoryVendor BranchCategory = "vendor" +) + +var csafBranchCategoryPattern = alternativesUnmarshal( + string(CSAFBranchCategoryArchitecture), + string(CSAFBranchCategoryHostName), + string(CSAFBranchCategoryLanguage), + string(CSAFBranchCategoryLegacy), + string(CSAFBranchCategoryPatchLevel), + string(CSAFBranchCategoryProductFamily), + string(CSAFBranchCategoryProductName), + string(CSAFBranchCategoryProductVersion), + string(CSAFBranchCategoryProductVersionRange), + string(CSAFBranchCategoryServicePack), + string(CSAFBranchCategorySpecification), + string(CSAFBranchCategoryVendor)) + +// ProductID is a reference token for product instances. 
There is no predefined or +// required format for it as long as it uniquely identifies a product in the context +// of the current document. +type ProductID string + +// Products is a list of one or more unique ProductID elements. +type Products []ProductID + +// FileHashValue represents the value of a hash. +type FileHashValue string + +var FileHashValuePattern = patternUnmarshal(`^[0-9a-fA-F]{32,}$`) + +// FileHash is checksum hash. +// Values for 'algorithm' are derived from the currently supported digests OpenSSL. Leading dashes were removed. +type FileHash struct { + Algorithm string `json:"algorithm"` // required, default: sha256 + Value string `json:"value"` // required +} + +// Hashes is a list of hashes. +type Hashes struct { + FileHashes []FileHash `json:"file_hashes"` // required + FileName string `json:"filename"` // required +} + +// CPE represents a Common Platform Enumeration in an advisory. +type CPE string + +var CPEPattern = patternUnmarshal("^(cpe:2\\.3:[aho\\*\\-](:(((\\?*|\\*?)([a-zA-Z0-9\\-\\._]|(\\\\[\\\\\\*\\?!\"#\\$%&'\\(\\)\\+,/:;<=>@\\[\\]\\^`\\{\\|\\}~]))+(\\?*|\\*?))|[\\*\\-])){5}(:(([a-zA-Z]{2,3}(-([a-zA-Z]{2}|[0-9]{3}))?)|[\\*\\-]))(:(((\\?*|\\*?)([a-zA-Z0-9\\-\\._]|(\\\\[\\\\\\*\\?!\"#\\$%&'\\(\\)\\+,/:;<=>@\\[\\]\\^`\\{\\|\\}~]))+(\\?*|\\*?))|[\\*\\-])){4})|([c][pP][eE]:/[AHOaho]?(:[A-Za-z0-9\\._\\-~%]*){0,6})$") + +// PURL represents a package URL in an advisory. +type PURL string + +var PURLPattern = patternUnmarshal(`^pkg:[A-Za-z\\.\\-\\+][A-Za-z0-9\\.\\-\\+]*/.+`) + +// XGenericURI represents an identifier for a product. +type XGenericURI struct { + Namespace string `json:"namespace"` // required + URI string `json:"uri"` // required +} + +// ProductIdentificationHelper bundles product identifier information. +// Supported formats for SBOMs are SPDX, CycloneDX, and SWID +type ProductIdentificationHelper struct { + CPE *CPE `json:"cpe,omitempty"` + Hashes *Hashes `json:"hashes,omitempty"` + ModelNumbers []*string `json:"model_numbers,omitempty"` // unique elements + PURL *PURL `json:"purl,omitempty"` + SBOMURLs []*string `json:"sbom_urls,omitempty"` + SerialNumbers []*string `json:"serial_numbers,omitempty"` // unique elements + SKUs []*string `json:"skus,omitempty"` + XGenericURIs []*XGenericURI `json:"x_generic_uris,omitempty"` +} + +// FullProductName is the full name of a product. +type FullProductName struct { + Name string `json:"name"` // required + ProductID ProductID `json:"product_id"` // required + ProductIdentificationHelper *ProductIdentificationHelper `json:"product_identification_helper,omitempty"` +} + +// Branch reflects the 'branch' object in the list of branches. +// It may contain either the property Branches OR Product. +// If the category is 'product_version' the name MUST NOT contain +// version ranges of any kind. +// If the category is 'product_version_range' the name MUST contain +// version ranges. +type Branch struct { + Branches []*Branch `json:"branches,omitempty"` + Category BranchCategory `json:"category"` // required + Name string `json:"name"` // required + Product *FullProductName `json:"product,omitempty"` +} + +// NoteCategory is the category of a note. +type NoteCategory string + +const ( + // CSAFNoteCategoryDescription is the "description" category. + CSAFNoteCategoryDescription NoteCategory = "description" + // CSAFNoteCategoryDetails is the "details" category. + CSAFNoteCategoryDetails NoteCategory = "details" + // CSAFNoteCategoryFaq is the "faq" category. 
+ CSAFNoteCategoryFaq NoteCategory = "faq" + // CSAFNoteCategoryGeneral is the "general" category. + CSAFNoteCategoryGeneral NoteCategory = "general" + // CSAFNoteCategoryLegalDisclaimer is the "legal_disclaimer" category. + CSAFNoteCategoryLegalDisclaimer NoteCategory = "legal_disclaimer" + // CSAFNoteCategoryOther is the "other" category. + CSAFNoteCategoryOther NoteCategory = "other" + // CSAFNoteCategorySummary is the "summary" category. + CSAFNoteCategorySummary NoteCategory = "summary" +) + +var csafNoteCategoryPattern = alternativesUnmarshal( + string(CSAFNoteCategoryDescription), + string(CSAFNoteCategoryDetails), + string(CSAFNoteCategoryFaq), + string(CSAFNoteCategoryGeneral), + string(CSAFNoteCategoryLegalDisclaimer), + string(CSAFNoteCategoryOther), + string(CSAFNoteCategorySummary)) + +// Note reflects the 'Note' object of an advisory. +type Note struct { + Audience string `json:"audience,omitempty"` + NoteCategory *NoteCategory `json:"category"` // required + Text *string `json:"text"` // required + Title string `json:"title,omitempty"` +} + +// ReferenceCategory is the category of a note. +type ReferenceCategory string + +const ( + // CSAFReferenceCategoryExternal is the "external" category. + CSAFReferenceCategoryExternal ReferenceCategory = "external" + // CSAFReferenceCategorySelf is the "self" category. + CSAFReferenceCategorySelf ReferenceCategory = "self" +) + +var csafReferenceCategoryPattern = alternativesUnmarshal( + string(CSAFReferenceCategoryExternal), + string(CSAFReferenceCategorySelf)) + +// Reference holding any reference to conferences, papers, advisories, and other +// resources that are related and considered related to either a surrounding part of +// or the entire document and to be of value to the document consumer. +type Reference struct { + ReferenceCategory *string `json:"category"` // optional, default: external + Summary string `json:"summary"` // required + URL string `json:"url"` // required +} + +// AggregateSeverity stands for the urgency with which the vulnerabilities of an advisory +// (not a specific one) should be addressed. +type AggregateSeverity struct { + Namespace *string `json:"namespace,omitempty"` + Text string `json:"text"` // required +} + +// DocumentCategory represents a category of a document. +type DocumentCategory string + +var DocumentCategoryPattern = patternUnmarshal(`^[^\\s\\-_\\.](.*[^\\s\\-_\\.])?$`) + +// CSAFVersion is the version of a document. +type CSAFVersion string + +// CSAFVersion is the current version of CSAF. +const CSAFVersion20 CSAFVersion = "2.0" + +var CSAFVersionPattern = alternativesUnmarshal(string(CSAFVersion20)) + +// DocumentTLPLabel is the label of a TLP. +type DocumentTLPLabel string + +const ( + // DocumentTLPLabelAmber is the "AMBER" label. + DocumentTLPLabelAmber DocumentTLPLabel = "AMBER" + // DocumentTLPLabelGreen is the "GREEN" label. + DocumentTLPLabelGreen DocumentTLPLabel = "GREEN" + // DocumentTLPLabelRed is the "RED" label. + DocumentTLPLabelRed DocumentTLPLabel = "RED" + // DocumentTLPLabelWhite is the "WHITE" label. + DocumentTLPLabelWhite DocumentTLPLabel = "WHITE" +) + +var csafDocumentTLPLabelPattern = alternativesUnmarshal( + string(DocumentTLPLabelAmber), + string(DocumentTLPLabelGreen), + string(DocumentTLPLabelRed), + string(DocumentTLPLabelWhite)) + +// TLP provides details about the TLP classification of the document. 
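+// Illustrative sketch of the JSON this maps to as it appears under
+// "distribution"; the URL is an example value, not prescribed by this type:
+//
+//	"tlp": {
+//	  "label": "WHITE",
+//	  "url": "https://www.first.org/tlp/"
+//	}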
+type TLP struct { + DocumentTLPLabel DocumentTLPLabel `json:"label"` // required + URL *string `json:"url,omitempty"` +} + +// DocumentDistribution describes rules for sharing a document. +type DocumentDistribution struct { + Text *string `json:"text,omitempty"` + TLP *TLP `json:"tlp,omitempty"` +} + +// PublisherCategory is the category of a publisher. +type PublisherCategory string + +const ( + // CSAFPublisherCategoryCoordinator is the "coordinator" category. + CSAFPublisherCategoryCoordinator PublisherCategory = "coordinator" + // CSAFPublisherCategoryDiscoverer is the "discoverer" category. + CSAFPublisherCategoryDiscoverer PublisherCategory = "discoverer" + // CSAFPublisherCategoryOther is the "other" category. + CSAFPublisherCategoryOther PublisherCategory = "other" + // CSAFPublisherCategoryTranslator is the "translator" category. + CSAFPublisherCategoryTranslator PublisherCategory = "translator" + // CSAFPublisherCategoryUser is the "user" category. + CSAFPublisherCategoryUser PublisherCategory = "user" + // CSAFPublisherCategoryVendor is the "vendor" category. + CSAFPublisherCategoryVendor PublisherCategory = "vendor" +) + +var csafPublisherCategoryPattern = alternativesUnmarshal( + string(CSAFPublisherCategoryCoordinator), + string(CSAFPublisherCategoryDiscoverer), + string(CSAFPublisherCategoryOther), + string(CSAFPublisherCategoryTranslator), + string(CSAFPublisherCategoryUser), + string(CSAFPublisherCategoryVendor)) + +// DocumentPublisher provides information about the publishing entity. +type DocumentPublisher struct { + Category PublisherCategory `json:"category"` // required + ContactDetails *string `json:"contact_details,omitempty"` + IssuingAuthority *string `json:"issuing_authority,omitempty"` + Name string `json:"name"` // required + Namespace string `json:"namespace"` // required +} + +// The version specifies a version string to denote clearly the evolution of the content of the document. +type Version string + +var VersionPattern = patternUnmarshal(`^(0|[1-9][0-9]*)$|^((0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*) + (?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)$`) + +// Engine contains information about the engine that generated the CSAF document. +type Engine struct { + Name string `json:"name"` // required + Version *string `json:"version,omitempty"` +} + +// Generator holds elements related to the generation of the document. +// These items will reference when the document was actually created, +// including the date it was generated and the entity that generated it. +type Generator struct { + Date *string `json:"date,omitempty"` + Engine Engine `json:"engine"` // required +} + +// TrackingID is a unique identifier for the document. +type TrackingID string + +var TrackingIDPattern = patternUnmarshal("^[\\S](.*[\\S])?$") + +// Revision contains information about one revision of the document. +type Revision struct { + Date string `json:"date"` // required + LegacyVersion *string `json:"legacy_version,omitempty"` + Number Version `json:"number"` // required + Summary string `json:"summary"` // required +} + +// TrackingStatus is the category of a publisher. +type TrackingStatus string + +const ( + // CSAFTrackingStatusDraft is the "draft" category. + CSAFTrackingStatusDraft TrackingStatus = "draft" + // CSAFTrackingStatusFinal is the "final" category. + CSAFTrackingStatusFinal TrackingStatus = "final" + // CSAFTrackingStatusInterim is the "interim" category. 
+ CSAFTrackingStatusInterim TrackingStatus = "interim" +) + +var csafTrackingStatusPattern = alternativesUnmarshal( + string(CSAFTrackingStatusDraft), + string(CSAFTrackingStatusFinal), + string(CSAFTrackingStatusInterim)) + +// Tracking holds information that is necessary to track a CSAF document. +type Tracking struct { + Aliases []*string `json:"aliases,omitempty"` // unique elements + CurrentReleaseDate string `json:"current_release_date"` // required + Generator *Generator `json:"generator"` + ID TrackingID `json:"id"` // required + InitialReleaseDate string `json:"initial_release_date"` // required + RevisionHistory []Revision `json:"revision_history"` // required + Status TrackingStatus `json:"status"` // required + Version Version `json:"version"` // required +} + +// Lang is a language identifier, corresponding to IETF BCP 47 / RFC 5646. +type Lang string + +var LangPattern = patternUnmarshal("^(([A-Za-z]{2,3}(-[A-Za-z]{3}(-[A-Za-z]{3}){0,2})?|[A-Za-z]{4,8})(-[A-Za-z]{4})?(-([A-Za-z]{2}|[0-9]{3}))?(-([A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(-[A-WY-Za-wy-z0-9](-[A-Za-z0-9]{2,8})+)*(-[Xx](-[A-Za-z0-9]{1,8})+)?|[Xx](-[A-Za-z0-9]{1,8})+|[Ii]-[Dd][Ee][Ff][Aa][Uu][Ll][Tt]|[Ii]-[Mm][Ii][Nn][Gg][Oo])$") + +// Document contains meta-data about an advisory. +type Document struct { + Acknowledgements []Acknowledgement `json:"acknowledgements,omitempty"` + AggregateSeverity *AggregateSeverity `json:"aggregate_severity,omitempty"` + Category DocumentCategory `json:"category"` // required + CSAFVersion CSAFVersion `json:"csaf_version"` // required + Distribution *DocumentDistribution `json:"distribution,omitempty"` + Lang *Lang `json:"lang,omitempty"` + Notes []*Note `json:"notes,omitempty"` + Publisher DocumentPublisher `json:"publisher"` // required + References []*Reference `json:"references,omitempty"` + SourceLang *Lang `json:"source_lang,omitempty"` + Title string `json:"title"` // required + Tracking Tracking `json:"tracking"` // required +} + +// ProductGroupID is a reference token for product group instances. +type ProductGroupID string + +// ProductGroup is a group of products in the document that belong to one group. +type ProductGroup struct { + GroupID string `json:"group_id"` // required + ProductIDs Products `json:"product_ids"` // required, two or more unique elements + Summary *string `json:"summary,omitempty"` +} + +// ProductGroups is a list of ProductGroupIDs +type ProductGroups struct { + ProductGroupIDs []ProductGroupID `json:"product_group_ids"` // unique elements +} + +// RelationshipCategory is the category of a relationship. +type RelationshipCategory string + +const ( + // CSAFRelationshipCategoryDefaultComponentOf is the "default_component_of" category. + CSAFRelationshipCategoryDefaultComponentOf RelationshipCategory = "default_component_of" + // CSAFRelationshipCategoryExternalComponentOf is the "external_component_of" category. + CSAFRelationshipCategoryExternalComponentOf RelationshipCategory = "external_component_of" + // CSAFRelationshipCategoryInstalledOn is the "installed_on" category. + CSAFRelationshipCategoryInstalledOn RelationshipCategory = "installed_on" + // CSAFRelationshipCategoryInstalledWith is the "installed_with" category. + CSAFRelationshipCategoryInstalledWith RelationshipCategory = "installed_with" + // CSAFRelationshipCategoryOptionalComponentOf is the "optional_component_of" category. 
+ CSAFRelationshipCategoryOptionalComponentOf RelationshipCategory = "optional_component_of" +) + +var csafRelationshipCategoryPattern = alternativesUnmarshal( + string(CSAFRelationshipCategoryDefaultComponentOf), + string(CSAFRelationshipCategoryExternalComponentOf), + string(CSAFRelationshipCategoryInstalledOn), + string(CSAFRelationshipCategoryInstalledWith), + string(CSAFRelationshipCategoryOptionalComponentOf)) + +// Relationship establishes a link between two existing FullProductName elements. +type Relationship struct { + Category RelationshipCategory `json:"category"` // required + FullProductName FullProductName `json:"full_product_name"` // required + ProductReference ProductID `json:"product_reference"` // required + RelatesToProductReference ProductID `json:"relates_to_product_reference"` // required + +} + +// ProductTree contains product names that can be referenced elsewhere in the document. +type ProductTree struct { + Branches []*Branch `json:"branches,omitempty"` + FullProductNames []*FullProductName `json:"full_product_name,omitempty"` + ProductGroups *ProductGroups `json:"product_groups,omitempty"` + RelationShips []*Relationship `json:"relationships,omitempty"` +} + +// CVE holds the MITRE standard Common Vulnerabilities and Exposures (CVE) tracking number for a vulnerability. +type CVE string + +var CVEPattern = patternUnmarshal("^CVE-[0-9]{4}-[0-9]{4,}$") + +// WeaknessID is the identifier of a weakness. +type WeaknessID string + +var WeaknessIDPattern = patternUnmarshal("^CWE-[1-9]\\d{0,5}$") + +// CWE holds the MITRE standard Common Weakness Enumeration (CWE) for the weakness associated. +type CWE struct { + ID WeaknessID `json:"id"` // required + Name string `json:"name"` // required +} + +// FlagLabel is the label of a flag for a vulnerability. +type FlagLabel string + +const ( + // CSAFFlagLabelComponentNotPresent is the "component_not_present" label. + CSAFFlagLabelComponentNotPresent FlagLabel = "component_not_present" + // CSAFFlagLabelInlineMitigationsAlreadyExist is the "inline_mitigations_already_exist" label. + CSAFFlagLabelInlineMitigationsAlreadyExist FlagLabel = "inline_mitigations_already_exist" + // CSAFFlagLabelVulnerableCodeCannotBeControlledByAdversary is the "vulnerable_code_cannot_be_controlled_by_adversary" label. + CSAFFlagLabelVulnerableCodeCannotBeControlledByAdversary FlagLabel = "vulnerable_code_cannot_be_controlled_by_adversary" + // CSAFFlagLabelVulnerableCodeNotInExecutePath is the "vulnerable_code_not_in_execute_path" label. + CSAFFlagLabelVulnerableCodeNotInExecutePath FlagLabel = "vulnerable_code_not_in_execute_path" + // CSAFFlagLabelVulnerableCodeNotPresent is the "vulnerable_code_not_present" label. + CSAFFlagLabelVulnerableCodeNotPresent FlagLabel = "vulnerable_code_not_present" +) + +var csafFlagLabelPattern = alternativesUnmarshal( + string(CSAFFlagLabelComponentNotPresent), + string(CSAFFlagLabelInlineMitigationsAlreadyExist), + string(CSAFFlagLabelVulnerableCodeCannotBeControlledByAdversary), + string(CSAFFlagLabelVulnerableCodeNotInExecutePath), + string(CSAFFlagLabelVulnerableCodeNotPresent)) + +// Flag contains product specific information in regard to this vulnerability as a single +// machine readable flag. For example, this could be a machine readable justification +// code why a product is not affected. 
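+// Illustrative JSON sketch (the product ID and date are made up for this
+// example):
+//
+//	{
+//	  "label": "vulnerable_code_not_present",
+//	  "product_ids": ["CSAFPID-0001"],
+//	  "date": "2023-09-01T10:00:00.000Z"
+//	}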
+type Flag struct { + Date *string `json:"date,omitempty"` + GroupIds *ProductGroups `json:"group_ids,omitempty"` + Label FlagLabel `json:"label"` // required + ProductIds *Products `json:"product_ids,omitempty"` +} + +// VulnerabilityID is the identifier of a vulnerability. +type VulnerabilityID struct { + SystemName string `json:"system_name"` // required + Text string `json:"text"` // required +} + +// InvolvementParty is the party of an involvement. +type InvolvementParty string + +const ( + // CSAFInvolvementPartyCoordinator is the "coordinator" party. + CSAFInvolvementPartyCoordinator InvolvementParty = "coordinator" + // CSAFInvolvementPartyDiscoverer is the "discoverer" party. + CSAFInvolvementPartyDiscoverer InvolvementParty = "discoverer" + // CSAFInvolvementPartyOther is the "other" party. + CSAFInvolvementPartyOther InvolvementParty = "other" + // CSAFInvolvementPartyUser is the "user" party. + CSAFInvolvementPartyUser InvolvementParty = "user" + // CSAFInvolvementPartyVendor is the "vendor" party. + CSAFInvolvementPartyVendor InvolvementParty = "vendor" +) + +var csafInvolvementPartyPattern = alternativesUnmarshal( + string(CSAFInvolvementPartyCoordinator), + string(CSAFInvolvementPartyDiscoverer), + string(CSAFInvolvementPartyOther), + string(CSAFInvolvementPartyUser), + string(CSAFInvolvementPartyVendor)) + +// InvolvementStatus is the status of an involvement. +type InvolvementStatus string + +const ( + // CSAFInvolvementStatusCompleted is the "completed" status. + CSAFInvolvementStatusCompleted InvolvementStatus = "completed" + // CSAFInvolvementStatusContactAttempted is the "contact_attempted" status. + CSAFInvolvementStatusContactAttempted InvolvementStatus = "contact_attempted" + // CSAFInvolvementStatusDisputed is the "disputed" status. + CSAFInvolvementStatusDisputed InvolvementStatus = "disputed" + // CSAFInvolvementStatusInProgress is the "in_progress" status. + CSAFInvolvementStatusInProgress InvolvementStatus = "in_progress" + // CSAFInvolvementStatusNotContacted is the "not_contacted" status. + CSAFInvolvementStatusNotContacted InvolvementStatus = "not_contacted" + // CSAFInvolvementStatusOpen is the "open" status. + CSAFInvolvementStatusOpen InvolvementStatus = "open" +) + +var csafInvolvementStatusPattern = alternativesUnmarshal( + string(CSAFInvolvementStatusCompleted), + string(CSAFInvolvementStatusContactAttempted), + string(CSAFInvolvementStatusDisputed), + string(CSAFInvolvementStatusInProgress), + string(CSAFInvolvementStatusNotContacted), + string(CSAFInvolvementStatusOpen)) + +// Involvement is a container that allows the document producers to comment on the level of involvement +// (or engagement) of themselves (or third parties) in the vulnerability identification, scoping, and +// remediation process. It can also be used to convey the disclosure timeline. +// The ordered tuple of the values of party and date (if present) SHALL be unique within the involvements +// of a vulnerability. +type Involvement struct { + Date *string `json:"date,omitempty"` + Party InvolvementParty `json:"party"` // required + Status InvolvementStatus `json:"status"` // required + Summary *string `json:"summary,omitempty"` +} + +// ProductStatus contains different lists of ProductIDs which provide details on +// the status of the referenced product related to the current vulnerability. 
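+// Illustrative JSON sketch with made-up product IDs:
+//
+//	"product_status": {
+//	  "fixed": ["CSAFPID-0001"],
+//	  "known_affected": ["CSAFPID-0002"],
+//	  "under_investigation": ["CSAFPID-0003"]
+//	}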
+type ProductStatus struct { + FirstAffected *Products `json:"first_affected,omitempty"` + FirstFixed *Products `json:"first_fixed,omitempty"` + Fixed *Products `json:"fixed,omitempty"` + KnownAffected *Products `json:"known_affected,omitempty"` + KnownNotAffected *Products `json:"known_not_affected,omitempty"` + LastAffected *Products `json:"last_affected,omitempty"` + Recommended *Products `json:"recommended,omitempty"` + UnderInvestigation *Products `json:"under_investigation,omitempty"` +} + +// RemediationCategory is the category of a remediation. +type RemediationCategory string + +const ( + // CSAFRemediationCategoryMitigation is the "mitigation" category. + CSAFRemediationCategoryMitigation RemediationCategory = "mitigation" + // CSAFRemediationCategoryNoFixPlanned is the "no_fix_planned" category. + CSAFRemediationCategoryNoFixPlanned RemediationCategory = "no_fix_planned" + // CSAFRemediationCategoryNoneAvailable is the "none_available" category. + CSAFRemediationCategoryNoneAvailable RemediationCategory = "none_available" + // CSAFRemediationCategoryVendorFix is the "vendor_fix" category. + CSAFRemediationCategoryVendorFix RemediationCategory = "vendor_fix" + // CSAFRemediationCategoryWorkaround is the "workaround" category. + CSAFRemediationCategoryWorkaround RemediationCategory = "workaround" +) + +var csafRemediationCategoryPattern = alternativesUnmarshal( + string(CSAFRemediationCategoryMitigation), + string(CSAFRemediationCategoryNoFixPlanned), + string(CSAFRemediationCategoryNoneAvailable), + string(CSAFRemediationCategoryVendorFix), + string(CSAFRemediationCategoryWorkaround)) + +// RestartRequiredCategory is the category of RestartRequired. +type RestartRequiredCategory string + +const ( + // CSAFRestartRequiredCategoryConnected is the "connected" category. + CSAFRestartRequiredCategoryConnected RestartRequiredCategory = "connected" + // CSAFRestartRequiredCategoryDependencies is the "dependencies" category. + CSAFRestartRequiredCategoryDependencies RestartRequiredCategory = "dependencies" + // CSAFRestartRequiredCategoryMachine is the "machine" category. + CSAFRestartRequiredCategoryMachine RestartRequiredCategory = "machine" + // CSAFRestartRequiredCategoryNone is the "none" category. + CSAFRestartRequiredCategoryNone RestartRequiredCategory = "none" + // CSAFRestartRequiredCategoryParent is the "parent" category. + CSAFRestartRequiredCategoryParent RestartRequiredCategory = "parent" + // CSAFRestartRequiredCategoryService is the "service" category. + CSAFRestartRequiredCategoryService RestartRequiredCategory = "service" + // CSAFRestartRequiredCategorySystem is the "system" category. + CSAFRestartRequiredCategorySystem RestartRequiredCategory = "system" + // CSAFRestartRequiredCategoryVulnerableComponent is the "vulnerable_component" category. + CSAFRestartRequiredCategoryVulnerableComponent RestartRequiredCategory = "vulnerable_component" + // CSAFRestartRequiredCategoryZone is the "zone" category. 
+ CSAFRestartRequiredCategoryZone RestartRequiredCategory = "zone" +) + +var csafRestartRequiredCategoryPattern = alternativesUnmarshal( + string(CSAFRestartRequiredCategoryConnected), + string(CSAFRestartRequiredCategoryDependencies), + string(CSAFRestartRequiredCategoryMachine), + string(CSAFRestartRequiredCategoryNone), + string(CSAFRestartRequiredCategoryParent), + string(CSAFRestartRequiredCategoryService), + string(CSAFRestartRequiredCategorySystem), + string(CSAFRestartRequiredCategoryVulnerableComponent), + string(CSAFRestartRequiredCategoryZone)) + +// RestartRequired provides information on category of restart is required by this remediation to become +// effective. +type RestartRequired struct { + Category RestartRequiredCategory `json:"category"` // required + Details *string `json:"details,omitempty"` +} + +// Remediation specifies details on how to handle (and presumably, fix) a vulnerability. +type Remediation struct { + Category *RemediationCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details *string `json:"details"` // required + Entitlements []*string `json:"entitlements,omitempty"` + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` + RestartRequired *RestartRequired `json:"restart_required,omitempty"` + URL *string `json:"url,omitempty"` +} + +// CVSSv3Version is the version of a CVSSv3 item. +type CVSSv3Version string + +// CVSSv3Version30 is version 3.0 of a CVSSv3 item. +const CVSSv3Version30 CVSSv3Version = "3.0" + +// CVSSv3Version31 is version 3.1 of a CVSSv3 item. +const CVSSv3Version31 CVSSv3Version = "3.1" + +// CVSSv3VectorString is the VectorString of a CVSSv3 item with version 3.x. +type CVSSv3VectorString string + +var CVSSv3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$`) + +var CVSSv3VersionPattern = alternativesUnmarshal( + string(CVSSv3Version30), + string(CVSSv3Version31)) + +// CVSSv2 holding a CVSS v2.0 value +type CVSSv2 struct { + Version string `json:"version"` // required + VectorString string `json:"vectorString"` // required + AccessVector *string `json:"accessVector"` + AccessComplexity *string `json:"accessComplexity"` + Authentication *string `json:"authentication"` + ConfidentialityImpact *string `json:"confidentialityImpact"` + IntegrityImpact *string `json:"integrityImpact"` + AvailabilityImpact *string `json:"availabilityImpact"` + BaseScore float64 `json:"baseScore"` // required + Exploitability *string `json:"exploitability"` + RemediationLevel *string `json:"remediationLevel"` + ReportConfidence *string `json:"reportConfidence"` + TemporalScore *string `json:"temporalScore"` + CollateralDamagePotential *string `json:"collateralDamagePotential"` + TargetDistribution *string `json:"targetDistribution"` + ConfidentialityRequirement *string `json:"confidentialityRequirement"` + IntegrityRequirement *string `json:"integrityRequirement"` + AvailabilityRequirement *string `json:"availabilityRequirement"` + EnvironmentalScore *string `json:"environmentalScore"` +} + +// CVSSv3 holding a CVSS v3.x value +type CVSSv3 struct { + Version string `json:"version"` // required + VectorString string `json:"vectorString"` 
// required + AttackVector *string `json:"attackVector"` + AttackComplexity *string `json:"attackComplexity"` + PrivilegesRequired *string `json:"privilegesRequired"` + UserInteraction *string `json:"userInteraction"` + Scope *string `json:"scope"` + ConfidentialityImpact *string `json:"confidentialityImpact"` + IntegrityImpact string `json:"integrityImpact"` + AvailabilityImpact *string `json:"availabilityImpact"` + BaseScore float64 `json:"baseScore"` // required + BaseSeverity string `json:"baseSeverity"` // required + ExploitCodeMaturity *string `json:"exploitCodeMaturity"` + RemediationLevel *string `json:"remediationLevel"` + ReportConfidence *string `json:"reportConfidence"` + TemporalScore *string `json:"temporalScore"` + TemporalSeverity *string `json:"temporalSeverity"` + ConfidentialityRequirement *string `json:"confidentialityRequirement"` + IntegrityRequirement *string `json:"integrityRequirement"` + AvailabilityRequirement *string `json:"availabilityRequirement"` + ModifiedAttackVector *string `json:"modifiedAttackVector"` + ModifiedAttackComplexity *string `json:"modifiedAttackComplexity"` + ModifiedPrivilegesRequired *string `json:"modifiedPrivilegesRequired"` + ModifiedUserInteraction *string `json:"modifiedUserInteraction"` + ModifiedScope *string `json:"modifiedScope"` + ModifiedConfidentialityImpact *string `json:"modifiedConfidentialityImpact"` + ModifiedIntegrityImpact *string `json:"modifiedIntegrityImpact"` + ModifiedAvailabilityImpact *string `json:"modifiedAvailabilityImpact"` + EenvironmentalScore *string `json:"environmentalScore"` + EnvironmentalSeverity *string `json:"environmentalSeverity"` +} + +// Score specifies information about (at least one) score of the vulnerability and for which +// products the given value applies. A Score item has at least 2 properties. +type Score struct { + CVSSv2 *CVSSv2 `json:"cvss_v2,omitempty"` + CVSSv3 *CVSSv3 `json:"cvss_v3,omitempty"` + Products *Products `json:"products"` // required +} + +// ThreatCategory is the category of a threat. +type ThreatCategory string + +const ( + // CSAFThreatCategoryExploitStatus is the "exploit_status" category. + CSAFThreatCategoryExploitStatus ThreatCategory = "exploit_status" + // CSAFThreatCategoryImpact is the "impact" category. + CSAFThreatCategoryImpact ThreatCategory = "impact" + // CSAFThreatCategoryTargetSet is the "target_set" category. + CSAFThreatCategoryTargetSet ThreatCategory = "target_set" +) + +var csafThreatCategoryPattern = alternativesUnmarshal( + string(CSAFThreatCategoryExploitStatus), + string(CSAFThreatCategoryImpact), + string(CSAFThreatCategoryTargetSet)) + +// Threat contains information about a vulnerability that can change with time. +type Threat struct { + Category ThreatCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details string `json:"details"` // required + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` +} + +// Vulnerability contains all fields that are related to a single vulnerability in the document. 
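+// Illustrative JSON sketch (the CVE number and product ID are invented):
+//
+//	{
+//	  "cve": "CVE-2023-0001",
+//	  "title": "Example vulnerability",
+//	  "product_status": {
+//	    "known_affected": ["CSAFPID-0001"]
+//	  }
+//	}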
+type Vulnerability struct { + Acknowledgements []*Acknowledgement `json:"acknowledgements,omitempty"` + CVE *CVE `json:"cve,omitempty"` + CWE *CWE `json:"cwe,omitempty"` + DiscoveryDate *string `json:"discovery_date,omitempty"` + Flags []*Flag `json:"flags,omitempty"` + Ids []*VulnerabilityID `json:"ids,omitempty"` // unique ID elements + Involvements []*Involvement `json:"involvements,omitempty"` + Notes []*Note `json:"notes,omitempty"` + ProductStatus *ProductStatus `json:"product_status,omitempty"` + References []*Reference `json:"references,omitempty"` + ReleaseDate *string `json:"release_date,omitempty"` + Remediations []*Remediation `json:"remediations,omitempty"` + Scores []*Score `json:"scores,omitempty"` + Threats []*Threat `json:"threats,omitempty"` + Title *string `json:"title,omitempty"` +} + +// Advisory represents a CSAF advisory. +type Advisory struct { + Document Document `json:"document"` // required + ProductTree *ProductTree `json:"product_tree,omitempty"` + Vulnerabilities []*Vulnerability `json:"vulnerabilities,omitempty"` +} + +// Validate checks if the advisory is valid. +// Returns an error if the validation fails otherwise nil. +func (adv *Advisory) Validate() error { + // TODO + return nil +} + +// LoadAdvisory loads an advisory from a file. +func LoadAdvisory(fname string) (*Advisory, error) { + f, err := os.Open(fname) + if err != nil { + return nil, err + } + + var advisory Advisory + dec := json.NewDecoder(f) + if err := dec.Decode(&advisory); err != nil { + return nil, err + } + defer f.Close() + + return &advisory, nil +} + +// SaveAdvisory writes the JSON encoding of the given advisory to a +// file with the given name. +// It returns nil, otherwise an error. +func SaveAdvisory(adv *Advisory, fname string) error { + var w io.WriteCloser + f, err := os.Create(fname) + if err != nil { + return err + } + w = f + + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + err = enc.Encode(adv) + if e := w.Close(); err != nil { + err = e + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (bc *BranchCategory) UnmarshalText(data []byte) error { + s, err := csafBranchCategoryPattern(data) + if err == nil { + *bc = BranchCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (nc *NoteCategory) UnmarshalText(data []byte) error { + s, err := csafNoteCategoryPattern(data) + if err == nil { + *nc = NoteCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (rc *ReferenceCategory) UnmarshalText(data []byte) error { + s, err := csafReferenceCategoryPattern(data) + if err == nil { + *rc = ReferenceCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (dtl *DocumentTLPLabel) UnmarshalText(data []byte) error { + s, err := csafDocumentTLPLabelPattern(data) + if err == nil { + *dtl = DocumentTLPLabel(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (pc *PublisherCategory) UnmarshalText(data []byte) error { + s, err := csafPublisherCategoryPattern(data) + if err == nil { + *pc = PublisherCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. 
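+// A value that does not match one of the allowed alternatives yields an
+// error and leaves the receiver unchanged. Hedged usage sketch:
+//
+//	var ts TrackingStatus
+//	err := json.Unmarshal([]byte(`"final"`), &ts)
+//	// err == nil, ts == CSAFTrackingStatusFinal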
+func (ts *TrackingStatus) UnmarshalText(data []byte) error { + s, err := csafTrackingStatusPattern(data) + if err == nil { + *ts = TrackingStatus(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (rc *RelationshipCategory) UnmarshalText(data []byte) error { + s, err := csafRelationshipCategoryPattern(data) + if err == nil { + *rc = RelationshipCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (fl *FlagLabel) UnmarshalText(data []byte) error { + s, err := csafFlagLabelPattern(data) + if err == nil { + *fl = FlagLabel(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (ip *InvolvementParty) UnmarshalText(data []byte) error { + s, err := csafInvolvementPartyPattern(data) + if err == nil { + *ip = InvolvementParty(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (is *InvolvementStatus) UnmarshalText(data []byte) error { + s, err := csafInvolvementStatusPattern(data) + if err == nil { + *is = InvolvementStatus(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (rc *RemediationCategory) UnmarshalText(data []byte) error { + s, err := csafRemediationCategoryPattern(data) + if err == nil { + *rc = RemediationCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (rrc *RestartRequiredCategory) UnmarshalText(data []byte) error { + s, err := csafRestartRequiredCategoryPattern(data) + if err == nil { + *rrc = RestartRequiredCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (tc *ThreatCategory) UnmarshalText(data []byte) error { + s, err := csafThreatCategoryPattern(data) + if err == nil { + *tc = ThreatCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cpe *CPE) UnmarshalText(data []byte) error { + s, err := CPEPattern(data) + if err == nil { + *cpe = CPE(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (fhv *FileHashValue) UnmarshalText(data []byte) error { + s, err := FileHashValuePattern(data) + if err == nil { + *fhv = FileHashValue(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (p *PURL) UnmarshalText(data []byte) error { + s, err := PURLPattern(data) + if err == nil { + *p = PURL(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (l *Lang) UnmarshalText(data []byte) error { + s, err := LangPattern(data) + if err == nil { + *l = Lang(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (v *Version) UnmarshalText(data []byte) error { + s, err := VersionPattern(data) + if err == nil { + *v = Version(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (dc *DocumentCategory) UnmarshalText(data []byte) error { + s, err := DocumentCategoryPattern(data) + if err == nil { + *dc = DocumentCategory(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cv *CSAFVersion) UnmarshalText(data []byte) error { + s, err := CSAFVersionPattern(data) + if err == nil { + *cv = CSAFVersion(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. 
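+// Hedged sketch: leading or trailing whitespace does not match
+// TrackingIDPattern, so such input is rejected and the receiver keeps its
+// previous value (the ID shown is invented):
+//
+//	var id TrackingID
+//	err := id.UnmarshalText([]byte(" EXAMPLE-2023-0001 "))
+//	// err != nil, id == ""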
+func (ti *TrackingID) UnmarshalText(data []byte) error { + s, err := TrackingIDPattern(data) + if err == nil { + *ti = TrackingID(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cve *CVE) UnmarshalText(data []byte) error { + s, err := CVEPattern(data) + if err == nil { + *cve = CVE(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (wi *WeaknessID) UnmarshalText(data []byte) error { + s, err := WeaknessIDPattern(data) + if err == nil { + *wi = WeaknessID(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cv *CVSSv3Version) UnmarshalText(data []byte) error { + s, err := CVSSv3VersionPattern(data) + if err == nil { + *cv = CVSSv3Version(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cvs *CVSSv3VectorString) UnmarshalText(data []byte) error { + s, err := CVSSv3VectorStringPattern(data) + if err == nil { + *cvs = CVSSv3VectorString(s) + } + return err +} From 96608a07fefbb7ba1b487372914d4b5dd066fe12 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Sat, 2 Sep 2023 17:03:46 +0200 Subject: [PATCH 002/235] Move defer.Close to right position. --- csaf/advisory.go | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index dc73902..8d52636 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -781,14 +781,11 @@ func LoadAdvisory(fname string) (*Advisory, error) { if err != nil { return nil, err } - + defer f.Close() var advisory Advisory - dec := json.NewDecoder(f) - if err := dec.Decode(&advisory); err != nil { + if err := json.NewDecoder(f).Decode(&advisory); err != nil { return nil, err } - defer f.Close() - return &advisory, nil } From e82168342302e4b4d7f7c8d6b70b37a2adebbc5d Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Sat, 2 Sep 2023 17:38:12 +0200 Subject: [PATCH 003/235] Add generator for CVSS enums --- README.md | 6 + csaf/advisory.go | 98 +++---- csaf/cvss20enums.go | 314 ++++++++++++++++++++++ csaf/cvss30enums.go | 500 ++++++++++++++++++++++++++++++++++++ csaf/cvss31enums.go | 500 ++++++++++++++++++++++++++++++++++++ csaf/doc.go | 4 + csaf/generate_cvss_enums.go | 152 +++++++++++ 7 files changed, 1525 insertions(+), 49 deletions(-) create mode 100644 csaf/cvss20enums.go create mode 100644 csaf/cvss30enums.go create mode 100644 csaf/cvss31enums.go create mode 100644 csaf/generate_cvss_enums.go diff --git a/README.md b/README.md index 09aa341..a03cd3a 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,12 @@ Download the binaries from the most recent release assets on Github. Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-windows-amd64/`. +- Maintainers only: No need to do this if you have cloned this repository for unmodified usage only. +``` +go generate ./... +``` +will update the machine generated code. 
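+The files written by this step are `csaf/cvss20enums.go`, `csaf/cvss30enums.go`
+and `csaf/cvss31enums.go`; they carry a "THIS FILE IS MACHINE GENERATED"
+header and are produced by `csaf/generate_cvss_enums.go`, so changes normally
+belong in the generator rather than in the generated files.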
+ ### Setup (Trusted Provider) diff --git a/csaf/advisory.go b/csaf/advisory.go index dc73902..9895f5b 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -653,59 +653,59 @@ var CVSSv3VersionPattern = alternativesUnmarshal( // CVSSv2 holding a CVSS v2.0 value type CVSSv2 struct { - Version string `json:"version"` // required - VectorString string `json:"vectorString"` // required - AccessVector *string `json:"accessVector"` - AccessComplexity *string `json:"accessComplexity"` - Authentication *string `json:"authentication"` - ConfidentialityImpact *string `json:"confidentialityImpact"` - IntegrityImpact *string `json:"integrityImpact"` - AvailabilityImpact *string `json:"availabilityImpact"` - BaseScore float64 `json:"baseScore"` // required - Exploitability *string `json:"exploitability"` - RemediationLevel *string `json:"remediationLevel"` - ReportConfidence *string `json:"reportConfidence"` - TemporalScore *string `json:"temporalScore"` - CollateralDamagePotential *string `json:"collateralDamagePotential"` - TargetDistribution *string `json:"targetDistribution"` - ConfidentialityRequirement *string `json:"confidentialityRequirement"` - IntegrityRequirement *string `json:"integrityRequirement"` - AvailabilityRequirement *string `json:"availabilityRequirement"` - EnvironmentalScore *string `json:"environmentalScore"` + Version string `json:"version"` // required + VectorString string `json:"vectorString"` // required + AccessVector *CVSS20AccessVector `json:"accessVector"` + AccessComplexity *CVSS20AccessComplexity `json:"accessComplexity"` + Authentication *CVSS20Authentication `json:"authentication"` + ConfidentialityImpact *string `json:"confidentialityImpact"` + IntegrityImpact *string `json:"integrityImpact"` + AvailabilityImpact *string `json:"availabilityImpact"` + BaseScore float64 `json:"baseScore"` // required + Exploitability *CVSS20Exploitability `json:"exploitability"` + RemediationLevel *CVSS20RemediationLevel `json:"remediationLevel"` + ReportConfidence *string `json:"reportConfidence"` + TemporalScore *string `json:"temporalScore"` + CollateralDamagePotential *CVSS20CollateralDamagePotential `json:"collateralDamagePotential"` + TargetDistribution *CVSS20TargetDistribution `json:"targetDistribution"` + ConfidentialityRequirement *string `json:"confidentialityRequirement"` + IntegrityRequirement *string `json:"integrityRequirement"` + AvailabilityRequirement *string `json:"availabilityRequirement"` + EnvironmentalScore *string `json:"environmentalScore"` } // CVSSv3 holding a CVSS v3.x value type CVSSv3 struct { - Version string `json:"version"` // required - VectorString string `json:"vectorString"` // required - AttackVector *string `json:"attackVector"` - AttackComplexity *string `json:"attackComplexity"` - PrivilegesRequired *string `json:"privilegesRequired"` - UserInteraction *string `json:"userInteraction"` - Scope *string `json:"scope"` - ConfidentialityImpact *string `json:"confidentialityImpact"` - IntegrityImpact string `json:"integrityImpact"` - AvailabilityImpact *string `json:"availabilityImpact"` - BaseScore float64 `json:"baseScore"` // required - BaseSeverity string `json:"baseSeverity"` // required - ExploitCodeMaturity *string `json:"exploitCodeMaturity"` - RemediationLevel *string `json:"remediationLevel"` - ReportConfidence *string `json:"reportConfidence"` - TemporalScore *string `json:"temporalScore"` - TemporalSeverity *string `json:"temporalSeverity"` - ConfidentialityRequirement *string `json:"confidentialityRequirement"` - IntegrityRequirement 
*string `json:"integrityRequirement"` - AvailabilityRequirement *string `json:"availabilityRequirement"` - ModifiedAttackVector *string `json:"modifiedAttackVector"` - ModifiedAttackComplexity *string `json:"modifiedAttackComplexity"` - ModifiedPrivilegesRequired *string `json:"modifiedPrivilegesRequired"` - ModifiedUserInteraction *string `json:"modifiedUserInteraction"` - ModifiedScope *string `json:"modifiedScope"` - ModifiedConfidentialityImpact *string `json:"modifiedConfidentialityImpact"` - ModifiedIntegrityImpact *string `json:"modifiedIntegrityImpact"` - ModifiedAvailabilityImpact *string `json:"modifiedAvailabilityImpact"` - EenvironmentalScore *string `json:"environmentalScore"` - EnvironmentalSeverity *string `json:"environmentalSeverity"` + Version string `json:"version"` // required + VectorString string `json:"vectorString"` // required + AttackVector *string `json:"attackVector"` + AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity"` + PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired"` + UserInteraction *CVSS30UserInteraction `json:"userInteraction"` + Scope *CVSS30Scope `json:"scope"` + ConfidentialityImpact *string `json:"confidentialityImpact"` + IntegrityImpact string `json:"integrityImpact"` + AvailabilityImpact *string `json:"availabilityImpact"` + BaseScore float64 `json:"baseScore"` // required + BaseSeverity string `json:"baseSeverity"` // required + ExploitCodeMaturity *CVSS30ExploitCodeMaturity `json:"exploitCodeMaturity"` + RemediationLevel *CVSS30RemediationLevel `json:"remediationLevel"` + ReportConfidence *string `json:"reportConfidence"` + TemporalScore *string `json:"temporalScore"` + TemporalSeverity *string `json:"temporalSeverity"` + ConfidentialityRequirement *string `json:"confidentialityRequirement"` + IntegrityRequirement *string `json:"integrityRequirement"` + AvailabilityRequirement *string `json:"availabilityRequirement"` + ModifiedAttackVector *CVSS30ModifiedAttackVector `json:"modifiedAttackVector"` + ModifiedAttackComplexity *CVSS30ModifiedAttackComplexity `json:"modifiedAttackComplexity"` + ModifiedPrivilegesRequired *string `json:"modifiedPrivilegesRequired"` + ModifiedUserInteraction *string `json:"modifiedUserInteraction"` + ModifiedScope *string `json:"modifiedScope"` + ModifiedConfidentialityImpact *string `json:"modifiedConfidentialityImpact"` + ModifiedIntegrityImpact *string `json:"modifiedIntegrityImpact"` + ModifiedAvailabilityImpact *string `json:"modifiedAvailabilityImpact"` + EenvironmentalScore *string `json:"environmentalScore"` + EnvironmentalSeverity *string `json:"environmentalSeverity"` } // Score specifies information about (at least one) score of the vulnerability and for which diff --git a/csaf/cvss20enums.go b/csaf/cvss20enums.go new file mode 100644 index 0000000..8862e8e --- /dev/null +++ b/csaf/cvss20enums.go @@ -0,0 +1,314 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH +// +// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! + +package csaf + +// CVSS20AccessComplexity represents the accessComplexityType in CVSS20. +type CVSS20AccessComplexity string + +const ( + // CVSS20AccessComplexityHigh is a constant for "HIGH". 
+ CVSS20AccessComplexityHigh CVSS20AccessComplexity = "HIGH" + // CVSS20AccessComplexityMedium is a constant for "MEDIUM". + CVSS20AccessComplexityMedium CVSS20AccessComplexity = "MEDIUM" + // CVSS20AccessComplexityLow is a constant for "LOW". + CVSS20AccessComplexityLow CVSS20AccessComplexity = "LOW" +) + +var cvss20AccessComplexityPattern = alternativesUnmarshal( + string(CVSS20AccessComplexityHigh), + string(CVSS20AccessComplexityMedium), + string(CVSS20AccessComplexityLow), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20AccessComplexity) UnmarshalText(data []byte) error { + s, err := cvss20AccessComplexityPattern(data) + if err == nil { + *e = CVSS20AccessComplexity(s) + } + return err +} + +// CVSS20AccessVector represents the accessVectorType in CVSS20. +type CVSS20AccessVector string + +const ( + // CVSS20AccessVectorNetwork is a constant for "NETWORK". + CVSS20AccessVectorNetwork CVSS20AccessVector = "NETWORK" + // CVSS20AccessVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". + CVSS20AccessVectorAdjacentNetwork CVSS20AccessVector = "ADJACENT_NETWORK" + // CVSS20AccessVectorLocal is a constant for "LOCAL". + CVSS20AccessVectorLocal CVSS20AccessVector = "LOCAL" +) + +var cvss20AccessVectorPattern = alternativesUnmarshal( + string(CVSS20AccessVectorNetwork), + string(CVSS20AccessVectorAdjacentNetwork), + string(CVSS20AccessVectorLocal), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20AccessVector) UnmarshalText(data []byte) error { + s, err := cvss20AccessVectorPattern(data) + if err == nil { + *e = CVSS20AccessVector(s) + } + return err +} + +// CVSS20Authentication represents the authenticationType in CVSS20. +type CVSS20Authentication string + +const ( + // CVSS20AuthenticationMultiple is a constant for "MULTIPLE". + CVSS20AuthenticationMultiple CVSS20Authentication = "MULTIPLE" + // CVSS20AuthenticationSingle is a constant for "SINGLE". + CVSS20AuthenticationSingle CVSS20Authentication = "SINGLE" + // CVSS20AuthenticationNone is a constant for "NONE". + CVSS20AuthenticationNone CVSS20Authentication = "NONE" +) + +var cvss20AuthenticationPattern = alternativesUnmarshal( + string(CVSS20AuthenticationMultiple), + string(CVSS20AuthenticationSingle), + string(CVSS20AuthenticationNone), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20Authentication) UnmarshalText(data []byte) error { + s, err := cvss20AuthenticationPattern(data) + if err == nil { + *e = CVSS20Authentication(s) + } + return err +} + +// CVSS20CiaRequirement represents the ciaRequirementType in CVSS20. +type CVSS20CiaRequirement string + +const ( + // CVSS20CiaRequirementLow is a constant for "LOW". + CVSS20CiaRequirementLow CVSS20CiaRequirement = "LOW" + // CVSS20CiaRequirementMedium is a constant for "MEDIUM". + CVSS20CiaRequirementMedium CVSS20CiaRequirement = "MEDIUM" + // CVSS20CiaRequirementHigh is a constant for "HIGH". + CVSS20CiaRequirementHigh CVSS20CiaRequirement = "HIGH" + // CVSS20CiaRequirementNotDefined is a constant for "NOT_DEFINED". + CVSS20CiaRequirementNotDefined CVSS20CiaRequirement = "NOT_DEFINED" +) + +var cvss20CiaRequirementPattern = alternativesUnmarshal( + string(CVSS20CiaRequirementLow), + string(CVSS20CiaRequirementMedium), + string(CVSS20CiaRequirementHigh), + string(CVSS20CiaRequirementNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
+func (e *CVSS20CiaRequirement) UnmarshalText(data []byte) error { + s, err := cvss20CiaRequirementPattern(data) + if err == nil { + *e = CVSS20CiaRequirement(s) + } + return err +} + +// CVSS20Cia represents the ciaType in CVSS20. +type CVSS20Cia string + +const ( + // CVSS20CiaNone is a constant for "NONE". + CVSS20CiaNone CVSS20Cia = "NONE" + // CVSS20CiaPartial is a constant for "PARTIAL". + CVSS20CiaPartial CVSS20Cia = "PARTIAL" + // CVSS20CiaComplete is a constant for "COMPLETE". + CVSS20CiaComplete CVSS20Cia = "COMPLETE" +) + +var cvss20CiaPattern = alternativesUnmarshal( + string(CVSS20CiaNone), + string(CVSS20CiaPartial), + string(CVSS20CiaComplete), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20Cia) UnmarshalText(data []byte) error { + s, err := cvss20CiaPattern(data) + if err == nil { + *e = CVSS20Cia(s) + } + return err +} + +// CVSS20CollateralDamagePotential represents the collateralDamagePotentialType in CVSS20. +type CVSS20CollateralDamagePotential string + +const ( + // CVSS20CollateralDamagePotentialNone is a constant for "NONE". + CVSS20CollateralDamagePotentialNone CVSS20CollateralDamagePotential = "NONE" + // CVSS20CollateralDamagePotentialLow is a constant for "LOW". + CVSS20CollateralDamagePotentialLow CVSS20CollateralDamagePotential = "LOW" + // CVSS20CollateralDamagePotentialLowMedium is a constant for "LOW_MEDIUM". + CVSS20CollateralDamagePotentialLowMedium CVSS20CollateralDamagePotential = "LOW_MEDIUM" + // CVSS20CollateralDamagePotentialMediumHigh is a constant for "MEDIUM_HIGH". + CVSS20CollateralDamagePotentialMediumHigh CVSS20CollateralDamagePotential = "MEDIUM_HIGH" + // CVSS20CollateralDamagePotentialHigh is a constant for "HIGH". + CVSS20CollateralDamagePotentialHigh CVSS20CollateralDamagePotential = "HIGH" + // CVSS20CollateralDamagePotentialNotDefined is a constant for "NOT_DEFINED". + CVSS20CollateralDamagePotentialNotDefined CVSS20CollateralDamagePotential = "NOT_DEFINED" +) + +var cvss20CollateralDamagePotentialPattern = alternativesUnmarshal( + string(CVSS20CollateralDamagePotentialNone), + string(CVSS20CollateralDamagePotentialLow), + string(CVSS20CollateralDamagePotentialLowMedium), + string(CVSS20CollateralDamagePotentialMediumHigh), + string(CVSS20CollateralDamagePotentialHigh), + string(CVSS20CollateralDamagePotentialNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20CollateralDamagePotential) UnmarshalText(data []byte) error { + s, err := cvss20CollateralDamagePotentialPattern(data) + if err == nil { + *e = CVSS20CollateralDamagePotential(s) + } + return err +} + +// CVSS20Exploitability represents the exploitabilityType in CVSS20. +type CVSS20Exploitability string + +const ( + // CVSS20ExploitabilityUnproven is a constant for "UNPROVEN". + CVSS20ExploitabilityUnproven CVSS20Exploitability = "UNPROVEN" + // CVSS20ExploitabilityProofOfConcept is a constant for "PROOF_OF_CONCEPT". + CVSS20ExploitabilityProofOfConcept CVSS20Exploitability = "PROOF_OF_CONCEPT" + // CVSS20ExploitabilityFunctional is a constant for "FUNCTIONAL". + CVSS20ExploitabilityFunctional CVSS20Exploitability = "FUNCTIONAL" + // CVSS20ExploitabilityHigh is a constant for "HIGH". + CVSS20ExploitabilityHigh CVSS20Exploitability = "HIGH" + // CVSS20ExploitabilityNotDefined is a constant for "NOT_DEFINED". 
+ CVSS20ExploitabilityNotDefined CVSS20Exploitability = "NOT_DEFINED" +) + +var cvss20ExploitabilityPattern = alternativesUnmarshal( + string(CVSS20ExploitabilityUnproven), + string(CVSS20ExploitabilityProofOfConcept), + string(CVSS20ExploitabilityFunctional), + string(CVSS20ExploitabilityHigh), + string(CVSS20ExploitabilityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20Exploitability) UnmarshalText(data []byte) error { + s, err := cvss20ExploitabilityPattern(data) + if err == nil { + *e = CVSS20Exploitability(s) + } + return err +} + +// CVSS20RemediationLevel represents the remediationLevelType in CVSS20. +type CVSS20RemediationLevel string + +const ( + // CVSS20RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX". + CVSS20RemediationLevelOfficialFix CVSS20RemediationLevel = "OFFICIAL_FIX" + // CVSS20RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX". + CVSS20RemediationLevelTemporaryFix CVSS20RemediationLevel = "TEMPORARY_FIX" + // CVSS20RemediationLevelWorkaround is a constant for "WORKAROUND". + CVSS20RemediationLevelWorkaround CVSS20RemediationLevel = "WORKAROUND" + // CVSS20RemediationLevelUnavailable is a constant for "UNAVAILABLE". + CVSS20RemediationLevelUnavailable CVSS20RemediationLevel = "UNAVAILABLE" + // CVSS20RemediationLevelNotDefined is a constant for "NOT_DEFINED". + CVSS20RemediationLevelNotDefined CVSS20RemediationLevel = "NOT_DEFINED" +) + +var cvss20RemediationLevelPattern = alternativesUnmarshal( + string(CVSS20RemediationLevelOfficialFix), + string(CVSS20RemediationLevelTemporaryFix), + string(CVSS20RemediationLevelWorkaround), + string(CVSS20RemediationLevelUnavailable), + string(CVSS20RemediationLevelNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20RemediationLevel) UnmarshalText(data []byte) error { + s, err := cvss20RemediationLevelPattern(data) + if err == nil { + *e = CVSS20RemediationLevel(s) + } + return err +} + +// CVSS20ReportConfidence represents the reportConfidenceType in CVSS20. +type CVSS20ReportConfidence string + +const ( + // CVSS20ReportConfidenceUnconfirmed is a constant for "UNCONFIRMED". + CVSS20ReportConfidenceUnconfirmed CVSS20ReportConfidence = "UNCONFIRMED" + // CVSS20ReportConfidenceUncorroborated is a constant for "UNCORROBORATED". + CVSS20ReportConfidenceUncorroborated CVSS20ReportConfidence = "UNCORROBORATED" + // CVSS20ReportConfidenceConfirmed is a constant for "CONFIRMED". + CVSS20ReportConfidenceConfirmed CVSS20ReportConfidence = "CONFIRMED" + // CVSS20ReportConfidenceNotDefined is a constant for "NOT_DEFINED". + CVSS20ReportConfidenceNotDefined CVSS20ReportConfidence = "NOT_DEFINED" +) + +var cvss20ReportConfidencePattern = alternativesUnmarshal( + string(CVSS20ReportConfidenceUnconfirmed), + string(CVSS20ReportConfidenceUncorroborated), + string(CVSS20ReportConfidenceConfirmed), + string(CVSS20ReportConfidenceNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20ReportConfidence) UnmarshalText(data []byte) error { + s, err := cvss20ReportConfidencePattern(data) + if err == nil { + *e = CVSS20ReportConfidence(s) + } + return err +} + +// CVSS20TargetDistribution represents the targetDistributionType in CVSS20. +type CVSS20TargetDistribution string + +const ( + // CVSS20TargetDistributionNone is a constant for "NONE". + CVSS20TargetDistributionNone CVSS20TargetDistribution = "NONE" + // CVSS20TargetDistributionLow is a constant for "LOW". 
+ CVSS20TargetDistributionLow CVSS20TargetDistribution = "LOW" + // CVSS20TargetDistributionMedium is a constant for "MEDIUM". + CVSS20TargetDistributionMedium CVSS20TargetDistribution = "MEDIUM" + // CVSS20TargetDistributionHigh is a constant for "HIGH". + CVSS20TargetDistributionHigh CVSS20TargetDistribution = "HIGH" + // CVSS20TargetDistributionNotDefined is a constant for "NOT_DEFINED". + CVSS20TargetDistributionNotDefined CVSS20TargetDistribution = "NOT_DEFINED" +) + +var cvss20TargetDistributionPattern = alternativesUnmarshal( + string(CVSS20TargetDistributionNone), + string(CVSS20TargetDistributionLow), + string(CVSS20TargetDistributionMedium), + string(CVSS20TargetDistributionHigh), + string(CVSS20TargetDistributionNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS20TargetDistribution) UnmarshalText(data []byte) error { + s, err := cvss20TargetDistributionPattern(data) + if err == nil { + *e = CVSS20TargetDistribution(s) + } + return err +} diff --git a/csaf/cvss30enums.go b/csaf/cvss30enums.go new file mode 100644 index 0000000..7524174 --- /dev/null +++ b/csaf/cvss30enums.go @@ -0,0 +1,500 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH +// +// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! + +package csaf + +// CVSS30AttackComplexity represents the attackComplexityType in CVSS30. +type CVSS30AttackComplexity string + +const ( + // CVSS30AttackComplexityHigh is a constant for "HIGH". + CVSS30AttackComplexityHigh CVSS30AttackComplexity = "HIGH" + // CVSS30AttackComplexityLow is a constant for "LOW". + CVSS30AttackComplexityLow CVSS30AttackComplexity = "LOW" +) + +var cvss30AttackComplexityPattern = alternativesUnmarshal( + string(CVSS30AttackComplexityHigh), + string(CVSS30AttackComplexityLow), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30AttackComplexity) UnmarshalText(data []byte) error { + s, err := cvss30AttackComplexityPattern(data) + if err == nil { + *e = CVSS30AttackComplexity(s) + } + return err +} + +// CVSS30AttackVector represents the attackVectorType in CVSS30. +type CVSS30AttackVector string + +const ( + // CVSS30AttackVectorNetwork is a constant for "NETWORK". + CVSS30AttackVectorNetwork CVSS30AttackVector = "NETWORK" + // CVSS30AttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". + CVSS30AttackVectorAdjacentNetwork CVSS30AttackVector = "ADJACENT_NETWORK" + // CVSS30AttackVectorLocal is a constant for "LOCAL". + CVSS30AttackVectorLocal CVSS30AttackVector = "LOCAL" + // CVSS30AttackVectorPhysical is a constant for "PHYSICAL". + CVSS30AttackVectorPhysical CVSS30AttackVector = "PHYSICAL" +) + +var cvss30AttackVectorPattern = alternativesUnmarshal( + string(CVSS30AttackVectorNetwork), + string(CVSS30AttackVectorAdjacentNetwork), + string(CVSS30AttackVectorLocal), + string(CVSS30AttackVectorPhysical), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30AttackVector) UnmarshalText(data []byte) error { + s, err := cvss30AttackVectorPattern(data) + if err == nil { + *e = CVSS30AttackVector(s) + } + return err +} + +// CVSS30CiaRequirement represents the ciaRequirementType in CVSS30. 
+type CVSS30CiaRequirement string + +const ( + // CVSS30CiaRequirementLow is a constant for "LOW". + CVSS30CiaRequirementLow CVSS30CiaRequirement = "LOW" + // CVSS30CiaRequirementMedium is a constant for "MEDIUM". + CVSS30CiaRequirementMedium CVSS30CiaRequirement = "MEDIUM" + // CVSS30CiaRequirementHigh is a constant for "HIGH". + CVSS30CiaRequirementHigh CVSS30CiaRequirement = "HIGH" + // CVSS30CiaRequirementNotDefined is a constant for "NOT_DEFINED". + CVSS30CiaRequirementNotDefined CVSS30CiaRequirement = "NOT_DEFINED" +) + +var cvss30CiaRequirementPattern = alternativesUnmarshal( + string(CVSS30CiaRequirementLow), + string(CVSS30CiaRequirementMedium), + string(CVSS30CiaRequirementHigh), + string(CVSS30CiaRequirementNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30CiaRequirement) UnmarshalText(data []byte) error { + s, err := cvss30CiaRequirementPattern(data) + if err == nil { + *e = CVSS30CiaRequirement(s) + } + return err +} + +// CVSS30Cia represents the ciaType in CVSS30. +type CVSS30Cia string + +const ( + // CVSS30CiaNone is a constant for "NONE". + CVSS30CiaNone CVSS30Cia = "NONE" + // CVSS30CiaLow is a constant for "LOW". + CVSS30CiaLow CVSS30Cia = "LOW" + // CVSS30CiaHigh is a constant for "HIGH". + CVSS30CiaHigh CVSS30Cia = "HIGH" +) + +var cvss30CiaPattern = alternativesUnmarshal( + string(CVSS30CiaNone), + string(CVSS30CiaLow), + string(CVSS30CiaHigh), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30Cia) UnmarshalText(data []byte) error { + s, err := cvss30CiaPattern(data) + if err == nil { + *e = CVSS30Cia(s) + } + return err +} + +// CVSS30Confidence represents the confidenceType in CVSS30. +type CVSS30Confidence string + +const ( + // CVSS30ConfidenceUnknown is a constant for "UNKNOWN". + CVSS30ConfidenceUnknown CVSS30Confidence = "UNKNOWN" + // CVSS30ConfidenceReasonable is a constant for "REASONABLE". + CVSS30ConfidenceReasonable CVSS30Confidence = "REASONABLE" + // CVSS30ConfidenceConfirmed is a constant for "CONFIRMED". + CVSS30ConfidenceConfirmed CVSS30Confidence = "CONFIRMED" + // CVSS30ConfidenceNotDefined is a constant for "NOT_DEFINED". + CVSS30ConfidenceNotDefined CVSS30Confidence = "NOT_DEFINED" +) + +var cvss30ConfidencePattern = alternativesUnmarshal( + string(CVSS30ConfidenceUnknown), + string(CVSS30ConfidenceReasonable), + string(CVSS30ConfidenceConfirmed), + string(CVSS30ConfidenceNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30Confidence) UnmarshalText(data []byte) error { + s, err := cvss30ConfidencePattern(data) + if err == nil { + *e = CVSS30Confidence(s) + } + return err +} + +// CVSS30ExploitCodeMaturity represents the exploitCodeMaturityType in CVSS30. +type CVSS30ExploitCodeMaturity string + +const ( + // CVSS30ExploitCodeMaturityUnproven is a constant for "UNPROVEN". + CVSS30ExploitCodeMaturityUnproven CVSS30ExploitCodeMaturity = "UNPROVEN" + // CVSS30ExploitCodeMaturityProofOfConcept is a constant for "PROOF_OF_CONCEPT". + CVSS30ExploitCodeMaturityProofOfConcept CVSS30ExploitCodeMaturity = "PROOF_OF_CONCEPT" + // CVSS30ExploitCodeMaturityFunctional is a constant for "FUNCTIONAL". + CVSS30ExploitCodeMaturityFunctional CVSS30ExploitCodeMaturity = "FUNCTIONAL" + // CVSS30ExploitCodeMaturityHigh is a constant for "HIGH". + CVSS30ExploitCodeMaturityHigh CVSS30ExploitCodeMaturity = "HIGH" + // CVSS30ExploitCodeMaturityNotDefined is a constant for "NOT_DEFINED". 
+ CVSS30ExploitCodeMaturityNotDefined CVSS30ExploitCodeMaturity = "NOT_DEFINED" +) + +var cvss30ExploitCodeMaturityPattern = alternativesUnmarshal( + string(CVSS30ExploitCodeMaturityUnproven), + string(CVSS30ExploitCodeMaturityProofOfConcept), + string(CVSS30ExploitCodeMaturityFunctional), + string(CVSS30ExploitCodeMaturityHigh), + string(CVSS30ExploitCodeMaturityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ExploitCodeMaturity) UnmarshalText(data []byte) error { + s, err := cvss30ExploitCodeMaturityPattern(data) + if err == nil { + *e = CVSS30ExploitCodeMaturity(s) + } + return err +} + +// CVSS30ModifiedAttackComplexity represents the modifiedAttackComplexityType in CVSS30. +type CVSS30ModifiedAttackComplexity string + +const ( + // CVSS30ModifiedAttackComplexityHigh is a constant for "HIGH". + CVSS30ModifiedAttackComplexityHigh CVSS30ModifiedAttackComplexity = "HIGH" + // CVSS30ModifiedAttackComplexityLow is a constant for "LOW". + CVSS30ModifiedAttackComplexityLow CVSS30ModifiedAttackComplexity = "LOW" + // CVSS30ModifiedAttackComplexityNotDefined is a constant for "NOT_DEFINED". + CVSS30ModifiedAttackComplexityNotDefined CVSS30ModifiedAttackComplexity = "NOT_DEFINED" +) + +var cvss30ModifiedAttackComplexityPattern = alternativesUnmarshal( + string(CVSS30ModifiedAttackComplexityHigh), + string(CVSS30ModifiedAttackComplexityLow), + string(CVSS30ModifiedAttackComplexityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ModifiedAttackComplexity) UnmarshalText(data []byte) error { + s, err := cvss30ModifiedAttackComplexityPattern(data) + if err == nil { + *e = CVSS30ModifiedAttackComplexity(s) + } + return err +} + +// CVSS30ModifiedAttackVector represents the modifiedAttackVectorType in CVSS30. +type CVSS30ModifiedAttackVector string + +const ( + // CVSS30ModifiedAttackVectorNetwork is a constant for "NETWORK". + CVSS30ModifiedAttackVectorNetwork CVSS30ModifiedAttackVector = "NETWORK" + // CVSS30ModifiedAttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". + CVSS30ModifiedAttackVectorAdjacentNetwork CVSS30ModifiedAttackVector = "ADJACENT_NETWORK" + // CVSS30ModifiedAttackVectorLocal is a constant for "LOCAL". + CVSS30ModifiedAttackVectorLocal CVSS30ModifiedAttackVector = "LOCAL" + // CVSS30ModifiedAttackVectorPhysical is a constant for "PHYSICAL". + CVSS30ModifiedAttackVectorPhysical CVSS30ModifiedAttackVector = "PHYSICAL" + // CVSS30ModifiedAttackVectorNotDefined is a constant for "NOT_DEFINED". + CVSS30ModifiedAttackVectorNotDefined CVSS30ModifiedAttackVector = "NOT_DEFINED" +) + +var cvss30ModifiedAttackVectorPattern = alternativesUnmarshal( + string(CVSS30ModifiedAttackVectorNetwork), + string(CVSS30ModifiedAttackVectorAdjacentNetwork), + string(CVSS30ModifiedAttackVectorLocal), + string(CVSS30ModifiedAttackVectorPhysical), + string(CVSS30ModifiedAttackVectorNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ModifiedAttackVector) UnmarshalText(data []byte) error { + s, err := cvss30ModifiedAttackVectorPattern(data) + if err == nil { + *e = CVSS30ModifiedAttackVector(s) + } + return err +} + +// CVSS30ModifiedCia represents the modifiedCiaType in CVSS30. +type CVSS30ModifiedCia string + +const ( + // CVSS30ModifiedCiaNone is a constant for "NONE". + CVSS30ModifiedCiaNone CVSS30ModifiedCia = "NONE" + // CVSS30ModifiedCiaLow is a constant for "LOW". 
+ CVSS30ModifiedCiaLow CVSS30ModifiedCia = "LOW" + // CVSS30ModifiedCiaHigh is a constant for "HIGH". + CVSS30ModifiedCiaHigh CVSS30ModifiedCia = "HIGH" + // CVSS30ModifiedCiaNotDefined is a constant for "NOT_DEFINED". + CVSS30ModifiedCiaNotDefined CVSS30ModifiedCia = "NOT_DEFINED" +) + +var cvss30ModifiedCiaPattern = alternativesUnmarshal( + string(CVSS30ModifiedCiaNone), + string(CVSS30ModifiedCiaLow), + string(CVSS30ModifiedCiaHigh), + string(CVSS30ModifiedCiaNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ModifiedCia) UnmarshalText(data []byte) error { + s, err := cvss30ModifiedCiaPattern(data) + if err == nil { + *e = CVSS30ModifiedCia(s) + } + return err +} + +// CVSS30ModifiedPrivilegesRequired represents the modifiedPrivilegesRequiredType in CVSS30. +type CVSS30ModifiedPrivilegesRequired string + +const ( + // CVSS30ModifiedPrivilegesRequiredHigh is a constant for "HIGH". + CVSS30ModifiedPrivilegesRequiredHigh CVSS30ModifiedPrivilegesRequired = "HIGH" + // CVSS30ModifiedPrivilegesRequiredLow is a constant for "LOW". + CVSS30ModifiedPrivilegesRequiredLow CVSS30ModifiedPrivilegesRequired = "LOW" + // CVSS30ModifiedPrivilegesRequiredNone is a constant for "NONE". + CVSS30ModifiedPrivilegesRequiredNone CVSS30ModifiedPrivilegesRequired = "NONE" + // CVSS30ModifiedPrivilegesRequiredNotDefined is a constant for "NOT_DEFINED". + CVSS30ModifiedPrivilegesRequiredNotDefined CVSS30ModifiedPrivilegesRequired = "NOT_DEFINED" +) + +var cvss30ModifiedPrivilegesRequiredPattern = alternativesUnmarshal( + string(CVSS30ModifiedPrivilegesRequiredHigh), + string(CVSS30ModifiedPrivilegesRequiredLow), + string(CVSS30ModifiedPrivilegesRequiredNone), + string(CVSS30ModifiedPrivilegesRequiredNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ModifiedPrivilegesRequired) UnmarshalText(data []byte) error { + s, err := cvss30ModifiedPrivilegesRequiredPattern(data) + if err == nil { + *e = CVSS30ModifiedPrivilegesRequired(s) + } + return err +} + +// CVSS30ModifiedScope represents the modifiedScopeType in CVSS30. +type CVSS30ModifiedScope string + +const ( + // CVSS30ModifiedScopeUnchanged is a constant for "UNCHANGED". + CVSS30ModifiedScopeUnchanged CVSS30ModifiedScope = "UNCHANGED" + // CVSS30ModifiedScopeChanged is a constant for "CHANGED". + CVSS30ModifiedScopeChanged CVSS30ModifiedScope = "CHANGED" + // CVSS30ModifiedScopeNotDefined is a constant for "NOT_DEFINED". + CVSS30ModifiedScopeNotDefined CVSS30ModifiedScope = "NOT_DEFINED" +) + +var cvss30ModifiedScopePattern = alternativesUnmarshal( + string(CVSS30ModifiedScopeUnchanged), + string(CVSS30ModifiedScopeChanged), + string(CVSS30ModifiedScopeNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ModifiedScope) UnmarshalText(data []byte) error { + s, err := cvss30ModifiedScopePattern(data) + if err == nil { + *e = CVSS30ModifiedScope(s) + } + return err +} + +// CVSS30ModifiedUserInteraction represents the modifiedUserInteractionType in CVSS30. +type CVSS30ModifiedUserInteraction string + +const ( + // CVSS30ModifiedUserInteractionNone is a constant for "NONE". + CVSS30ModifiedUserInteractionNone CVSS30ModifiedUserInteraction = "NONE" + // CVSS30ModifiedUserInteractionRequired is a constant for "REQUIRED". + CVSS30ModifiedUserInteractionRequired CVSS30ModifiedUserInteraction = "REQUIRED" + // CVSS30ModifiedUserInteractionNotDefined is a constant for "NOT_DEFINED". 
+ CVSS30ModifiedUserInteractionNotDefined CVSS30ModifiedUserInteraction = "NOT_DEFINED" +) + +var cvss30ModifiedUserInteractionPattern = alternativesUnmarshal( + string(CVSS30ModifiedUserInteractionNone), + string(CVSS30ModifiedUserInteractionRequired), + string(CVSS30ModifiedUserInteractionNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30ModifiedUserInteraction) UnmarshalText(data []byte) error { + s, err := cvss30ModifiedUserInteractionPattern(data) + if err == nil { + *e = CVSS30ModifiedUserInteraction(s) + } + return err +} + +// CVSS30PrivilegesRequired represents the privilegesRequiredType in CVSS30. +type CVSS30PrivilegesRequired string + +const ( + // CVSS30PrivilegesRequiredHigh is a constant for "HIGH". + CVSS30PrivilegesRequiredHigh CVSS30PrivilegesRequired = "HIGH" + // CVSS30PrivilegesRequiredLow is a constant for "LOW". + CVSS30PrivilegesRequiredLow CVSS30PrivilegesRequired = "LOW" + // CVSS30PrivilegesRequiredNone is a constant for "NONE". + CVSS30PrivilegesRequiredNone CVSS30PrivilegesRequired = "NONE" +) + +var cvss30PrivilegesRequiredPattern = alternativesUnmarshal( + string(CVSS30PrivilegesRequiredHigh), + string(CVSS30PrivilegesRequiredLow), + string(CVSS30PrivilegesRequiredNone), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30PrivilegesRequired) UnmarshalText(data []byte) error { + s, err := cvss30PrivilegesRequiredPattern(data) + if err == nil { + *e = CVSS30PrivilegesRequired(s) + } + return err +} + +// CVSS30RemediationLevel represents the remediationLevelType in CVSS30. +type CVSS30RemediationLevel string + +const ( + // CVSS30RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX". + CVSS30RemediationLevelOfficialFix CVSS30RemediationLevel = "OFFICIAL_FIX" + // CVSS30RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX". + CVSS30RemediationLevelTemporaryFix CVSS30RemediationLevel = "TEMPORARY_FIX" + // CVSS30RemediationLevelWorkaround is a constant for "WORKAROUND". + CVSS30RemediationLevelWorkaround CVSS30RemediationLevel = "WORKAROUND" + // CVSS30RemediationLevelUnavailable is a constant for "UNAVAILABLE". + CVSS30RemediationLevelUnavailable CVSS30RemediationLevel = "UNAVAILABLE" + // CVSS30RemediationLevelNotDefined is a constant for "NOT_DEFINED". + CVSS30RemediationLevelNotDefined CVSS30RemediationLevel = "NOT_DEFINED" +) + +var cvss30RemediationLevelPattern = alternativesUnmarshal( + string(CVSS30RemediationLevelOfficialFix), + string(CVSS30RemediationLevelTemporaryFix), + string(CVSS30RemediationLevelWorkaround), + string(CVSS30RemediationLevelUnavailable), + string(CVSS30RemediationLevelNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30RemediationLevel) UnmarshalText(data []byte) error { + s, err := cvss30RemediationLevelPattern(data) + if err == nil { + *e = CVSS30RemediationLevel(s) + } + return err +} + +// CVSS30Scope represents the scopeType in CVSS30. +type CVSS30Scope string + +const ( + // CVSS30ScopeUnchanged is a constant for "UNCHANGED". + CVSS30ScopeUnchanged CVSS30Scope = "UNCHANGED" + // CVSS30ScopeChanged is a constant for "CHANGED". + CVSS30ScopeChanged CVSS30Scope = "CHANGED" +) + +var cvss30ScopePattern = alternativesUnmarshal( + string(CVSS30ScopeUnchanged), + string(CVSS30ScopeChanged), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
+func (e *CVSS30Scope) UnmarshalText(data []byte) error { + s, err := cvss30ScopePattern(data) + if err == nil { + *e = CVSS30Scope(s) + } + return err +} + +// CVSS30Severity represents the severityType in CVSS30. +type CVSS30Severity string + +const ( + // CVSS30SeverityNone is a constant for "NONE". + CVSS30SeverityNone CVSS30Severity = "NONE" + // CVSS30SeverityLow is a constant for "LOW". + CVSS30SeverityLow CVSS30Severity = "LOW" + // CVSS30SeverityMedium is a constant for "MEDIUM". + CVSS30SeverityMedium CVSS30Severity = "MEDIUM" + // CVSS30SeverityHigh is a constant for "HIGH". + CVSS30SeverityHigh CVSS30Severity = "HIGH" + // CVSS30SeverityCritical is a constant for "CRITICAL". + CVSS30SeverityCritical CVSS30Severity = "CRITICAL" +) + +var cvss30SeverityPattern = alternativesUnmarshal( + string(CVSS30SeverityNone), + string(CVSS30SeverityLow), + string(CVSS30SeverityMedium), + string(CVSS30SeverityHigh), + string(CVSS30SeverityCritical), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30Severity) UnmarshalText(data []byte) error { + s, err := cvss30SeverityPattern(data) + if err == nil { + *e = CVSS30Severity(s) + } + return err +} + +// CVSS30UserInteraction represents the userInteractionType in CVSS30. +type CVSS30UserInteraction string + +const ( + // CVSS30UserInteractionNone is a constant for "NONE". + CVSS30UserInteractionNone CVSS30UserInteraction = "NONE" + // CVSS30UserInteractionRequired is a constant for "REQUIRED". + CVSS30UserInteractionRequired CVSS30UserInteraction = "REQUIRED" +) + +var cvss30UserInteractionPattern = alternativesUnmarshal( + string(CVSS30UserInteractionNone), + string(CVSS30UserInteractionRequired), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS30UserInteraction) UnmarshalText(data []byte) error { + s, err := cvss30UserInteractionPattern(data) + if err == nil { + *e = CVSS30UserInteraction(s) + } + return err +} diff --git a/csaf/cvss31enums.go b/csaf/cvss31enums.go new file mode 100644 index 0000000..0de4946 --- /dev/null +++ b/csaf/cvss31enums.go @@ -0,0 +1,500 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH +// +// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! + +package csaf + +// CVSS31AttackComplexity represents the attackComplexityType in CVSS31. +type CVSS31AttackComplexity string + +const ( + // CVSS31AttackComplexityHigh is a constant for "HIGH". + CVSS31AttackComplexityHigh CVSS31AttackComplexity = "HIGH" + // CVSS31AttackComplexityLow is a constant for "LOW". + CVSS31AttackComplexityLow CVSS31AttackComplexity = "LOW" +) + +var cvss31AttackComplexityPattern = alternativesUnmarshal( + string(CVSS31AttackComplexityHigh), + string(CVSS31AttackComplexityLow), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31AttackComplexity) UnmarshalText(data []byte) error { + s, err := cvss31AttackComplexityPattern(data) + if err == nil { + *e = CVSS31AttackComplexity(s) + } + return err +} + +// CVSS31AttackVector represents the attackVectorType in CVSS31. +type CVSS31AttackVector string + +const ( + // CVSS31AttackVectorNetwork is a constant for "NETWORK". 
+ CVSS31AttackVectorNetwork CVSS31AttackVector = "NETWORK" + // CVSS31AttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". + CVSS31AttackVectorAdjacentNetwork CVSS31AttackVector = "ADJACENT_NETWORK" + // CVSS31AttackVectorLocal is a constant for "LOCAL". + CVSS31AttackVectorLocal CVSS31AttackVector = "LOCAL" + // CVSS31AttackVectorPhysical is a constant for "PHYSICAL". + CVSS31AttackVectorPhysical CVSS31AttackVector = "PHYSICAL" +) + +var cvss31AttackVectorPattern = alternativesUnmarshal( + string(CVSS31AttackVectorNetwork), + string(CVSS31AttackVectorAdjacentNetwork), + string(CVSS31AttackVectorLocal), + string(CVSS31AttackVectorPhysical), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31AttackVector) UnmarshalText(data []byte) error { + s, err := cvss31AttackVectorPattern(data) + if err == nil { + *e = CVSS31AttackVector(s) + } + return err +} + +// CVSS31CiaRequirement represents the ciaRequirementType in CVSS31. +type CVSS31CiaRequirement string + +const ( + // CVSS31CiaRequirementLow is a constant for "LOW". + CVSS31CiaRequirementLow CVSS31CiaRequirement = "LOW" + // CVSS31CiaRequirementMedium is a constant for "MEDIUM". + CVSS31CiaRequirementMedium CVSS31CiaRequirement = "MEDIUM" + // CVSS31CiaRequirementHigh is a constant for "HIGH". + CVSS31CiaRequirementHigh CVSS31CiaRequirement = "HIGH" + // CVSS31CiaRequirementNotDefined is a constant for "NOT_DEFINED". + CVSS31CiaRequirementNotDefined CVSS31CiaRequirement = "NOT_DEFINED" +) + +var cvss31CiaRequirementPattern = alternativesUnmarshal( + string(CVSS31CiaRequirementLow), + string(CVSS31CiaRequirementMedium), + string(CVSS31CiaRequirementHigh), + string(CVSS31CiaRequirementNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31CiaRequirement) UnmarshalText(data []byte) error { + s, err := cvss31CiaRequirementPattern(data) + if err == nil { + *e = CVSS31CiaRequirement(s) + } + return err +} + +// CVSS31Cia represents the ciaType in CVSS31. +type CVSS31Cia string + +const ( + // CVSS31CiaNone is a constant for "NONE". + CVSS31CiaNone CVSS31Cia = "NONE" + // CVSS31CiaLow is a constant for "LOW". + CVSS31CiaLow CVSS31Cia = "LOW" + // CVSS31CiaHigh is a constant for "HIGH". + CVSS31CiaHigh CVSS31Cia = "HIGH" +) + +var cvss31CiaPattern = alternativesUnmarshal( + string(CVSS31CiaNone), + string(CVSS31CiaLow), + string(CVSS31CiaHigh), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31Cia) UnmarshalText(data []byte) error { + s, err := cvss31CiaPattern(data) + if err == nil { + *e = CVSS31Cia(s) + } + return err +} + +// CVSS31Confidence represents the confidenceType in CVSS31. +type CVSS31Confidence string + +const ( + // CVSS31ConfidenceUnknown is a constant for "UNKNOWN". + CVSS31ConfidenceUnknown CVSS31Confidence = "UNKNOWN" + // CVSS31ConfidenceReasonable is a constant for "REASONABLE". + CVSS31ConfidenceReasonable CVSS31Confidence = "REASONABLE" + // CVSS31ConfidenceConfirmed is a constant for "CONFIRMED". + CVSS31ConfidenceConfirmed CVSS31Confidence = "CONFIRMED" + // CVSS31ConfidenceNotDefined is a constant for "NOT_DEFINED". + CVSS31ConfidenceNotDefined CVSS31Confidence = "NOT_DEFINED" +) + +var cvss31ConfidencePattern = alternativesUnmarshal( + string(CVSS31ConfidenceUnknown), + string(CVSS31ConfidenceReasonable), + string(CVSS31ConfidenceConfirmed), + string(CVSS31ConfidenceNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
+func (e *CVSS31Confidence) UnmarshalText(data []byte) error { + s, err := cvss31ConfidencePattern(data) + if err == nil { + *e = CVSS31Confidence(s) + } + return err +} + +// CVSS31ExploitCodeMaturity represents the exploitCodeMaturityType in CVSS31. +type CVSS31ExploitCodeMaturity string + +const ( + // CVSS31ExploitCodeMaturityUnproven is a constant for "UNPROVEN". + CVSS31ExploitCodeMaturityUnproven CVSS31ExploitCodeMaturity = "UNPROVEN" + // CVSS31ExploitCodeMaturityProofOfConcept is a constant for "PROOF_OF_CONCEPT". + CVSS31ExploitCodeMaturityProofOfConcept CVSS31ExploitCodeMaturity = "PROOF_OF_CONCEPT" + // CVSS31ExploitCodeMaturityFunctional is a constant for "FUNCTIONAL". + CVSS31ExploitCodeMaturityFunctional CVSS31ExploitCodeMaturity = "FUNCTIONAL" + // CVSS31ExploitCodeMaturityHigh is a constant for "HIGH". + CVSS31ExploitCodeMaturityHigh CVSS31ExploitCodeMaturity = "HIGH" + // CVSS31ExploitCodeMaturityNotDefined is a constant for "NOT_DEFINED". + CVSS31ExploitCodeMaturityNotDefined CVSS31ExploitCodeMaturity = "NOT_DEFINED" +) + +var cvss31ExploitCodeMaturityPattern = alternativesUnmarshal( + string(CVSS31ExploitCodeMaturityUnproven), + string(CVSS31ExploitCodeMaturityProofOfConcept), + string(CVSS31ExploitCodeMaturityFunctional), + string(CVSS31ExploitCodeMaturityHigh), + string(CVSS31ExploitCodeMaturityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ExploitCodeMaturity) UnmarshalText(data []byte) error { + s, err := cvss31ExploitCodeMaturityPattern(data) + if err == nil { + *e = CVSS31ExploitCodeMaturity(s) + } + return err +} + +// CVSS31ModifiedAttackComplexity represents the modifiedAttackComplexityType in CVSS31. +type CVSS31ModifiedAttackComplexity string + +const ( + // CVSS31ModifiedAttackComplexityHigh is a constant for "HIGH". + CVSS31ModifiedAttackComplexityHigh CVSS31ModifiedAttackComplexity = "HIGH" + // CVSS31ModifiedAttackComplexityLow is a constant for "LOW". + CVSS31ModifiedAttackComplexityLow CVSS31ModifiedAttackComplexity = "LOW" + // CVSS31ModifiedAttackComplexityNotDefined is a constant for "NOT_DEFINED". + CVSS31ModifiedAttackComplexityNotDefined CVSS31ModifiedAttackComplexity = "NOT_DEFINED" +) + +var cvss31ModifiedAttackComplexityPattern = alternativesUnmarshal( + string(CVSS31ModifiedAttackComplexityHigh), + string(CVSS31ModifiedAttackComplexityLow), + string(CVSS31ModifiedAttackComplexityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ModifiedAttackComplexity) UnmarshalText(data []byte) error { + s, err := cvss31ModifiedAttackComplexityPattern(data) + if err == nil { + *e = CVSS31ModifiedAttackComplexity(s) + } + return err +} + +// CVSS31ModifiedAttackVector represents the modifiedAttackVectorType in CVSS31. +type CVSS31ModifiedAttackVector string + +const ( + // CVSS31ModifiedAttackVectorNetwork is a constant for "NETWORK". + CVSS31ModifiedAttackVectorNetwork CVSS31ModifiedAttackVector = "NETWORK" + // CVSS31ModifiedAttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". + CVSS31ModifiedAttackVectorAdjacentNetwork CVSS31ModifiedAttackVector = "ADJACENT_NETWORK" + // CVSS31ModifiedAttackVectorLocal is a constant for "LOCAL". + CVSS31ModifiedAttackVectorLocal CVSS31ModifiedAttackVector = "LOCAL" + // CVSS31ModifiedAttackVectorPhysical is a constant for "PHYSICAL". + CVSS31ModifiedAttackVectorPhysical CVSS31ModifiedAttackVector = "PHYSICAL" + // CVSS31ModifiedAttackVectorNotDefined is a constant for "NOT_DEFINED". 
+ CVSS31ModifiedAttackVectorNotDefined CVSS31ModifiedAttackVector = "NOT_DEFINED" +) + +var cvss31ModifiedAttackVectorPattern = alternativesUnmarshal( + string(CVSS31ModifiedAttackVectorNetwork), + string(CVSS31ModifiedAttackVectorAdjacentNetwork), + string(CVSS31ModifiedAttackVectorLocal), + string(CVSS31ModifiedAttackVectorPhysical), + string(CVSS31ModifiedAttackVectorNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ModifiedAttackVector) UnmarshalText(data []byte) error { + s, err := cvss31ModifiedAttackVectorPattern(data) + if err == nil { + *e = CVSS31ModifiedAttackVector(s) + } + return err +} + +// CVSS31ModifiedCia represents the modifiedCiaType in CVSS31. +type CVSS31ModifiedCia string + +const ( + // CVSS31ModifiedCiaNone is a constant for "NONE". + CVSS31ModifiedCiaNone CVSS31ModifiedCia = "NONE" + // CVSS31ModifiedCiaLow is a constant for "LOW". + CVSS31ModifiedCiaLow CVSS31ModifiedCia = "LOW" + // CVSS31ModifiedCiaHigh is a constant for "HIGH". + CVSS31ModifiedCiaHigh CVSS31ModifiedCia = "HIGH" + // CVSS31ModifiedCiaNotDefined is a constant for "NOT_DEFINED". + CVSS31ModifiedCiaNotDefined CVSS31ModifiedCia = "NOT_DEFINED" +) + +var cvss31ModifiedCiaPattern = alternativesUnmarshal( + string(CVSS31ModifiedCiaNone), + string(CVSS31ModifiedCiaLow), + string(CVSS31ModifiedCiaHigh), + string(CVSS31ModifiedCiaNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ModifiedCia) UnmarshalText(data []byte) error { + s, err := cvss31ModifiedCiaPattern(data) + if err == nil { + *e = CVSS31ModifiedCia(s) + } + return err +} + +// CVSS31ModifiedPrivilegesRequired represents the modifiedPrivilegesRequiredType in CVSS31. +type CVSS31ModifiedPrivilegesRequired string + +const ( + // CVSS31ModifiedPrivilegesRequiredHigh is a constant for "HIGH". + CVSS31ModifiedPrivilegesRequiredHigh CVSS31ModifiedPrivilegesRequired = "HIGH" + // CVSS31ModifiedPrivilegesRequiredLow is a constant for "LOW". + CVSS31ModifiedPrivilegesRequiredLow CVSS31ModifiedPrivilegesRequired = "LOW" + // CVSS31ModifiedPrivilegesRequiredNone is a constant for "NONE". + CVSS31ModifiedPrivilegesRequiredNone CVSS31ModifiedPrivilegesRequired = "NONE" + // CVSS31ModifiedPrivilegesRequiredNotDefined is a constant for "NOT_DEFINED". + CVSS31ModifiedPrivilegesRequiredNotDefined CVSS31ModifiedPrivilegesRequired = "NOT_DEFINED" +) + +var cvss31ModifiedPrivilegesRequiredPattern = alternativesUnmarshal( + string(CVSS31ModifiedPrivilegesRequiredHigh), + string(CVSS31ModifiedPrivilegesRequiredLow), + string(CVSS31ModifiedPrivilegesRequiredNone), + string(CVSS31ModifiedPrivilegesRequiredNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ModifiedPrivilegesRequired) UnmarshalText(data []byte) error { + s, err := cvss31ModifiedPrivilegesRequiredPattern(data) + if err == nil { + *e = CVSS31ModifiedPrivilegesRequired(s) + } + return err +} + +// CVSS31ModifiedScope represents the modifiedScopeType in CVSS31. +type CVSS31ModifiedScope string + +const ( + // CVSS31ModifiedScopeUnchanged is a constant for "UNCHANGED". + CVSS31ModifiedScopeUnchanged CVSS31ModifiedScope = "UNCHANGED" + // CVSS31ModifiedScopeChanged is a constant for "CHANGED". + CVSS31ModifiedScopeChanged CVSS31ModifiedScope = "CHANGED" + // CVSS31ModifiedScopeNotDefined is a constant for "NOT_DEFINED". 
+ CVSS31ModifiedScopeNotDefined CVSS31ModifiedScope = "NOT_DEFINED" +) + +var cvss31ModifiedScopePattern = alternativesUnmarshal( + string(CVSS31ModifiedScopeUnchanged), + string(CVSS31ModifiedScopeChanged), + string(CVSS31ModifiedScopeNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ModifiedScope) UnmarshalText(data []byte) error { + s, err := cvss31ModifiedScopePattern(data) + if err == nil { + *e = CVSS31ModifiedScope(s) + } + return err +} + +// CVSS31ModifiedUserInteraction represents the modifiedUserInteractionType in CVSS31. +type CVSS31ModifiedUserInteraction string + +const ( + // CVSS31ModifiedUserInteractionNone is a constant for "NONE". + CVSS31ModifiedUserInteractionNone CVSS31ModifiedUserInteraction = "NONE" + // CVSS31ModifiedUserInteractionRequired is a constant for "REQUIRED". + CVSS31ModifiedUserInteractionRequired CVSS31ModifiedUserInteraction = "REQUIRED" + // CVSS31ModifiedUserInteractionNotDefined is a constant for "NOT_DEFINED". + CVSS31ModifiedUserInteractionNotDefined CVSS31ModifiedUserInteraction = "NOT_DEFINED" +) + +var cvss31ModifiedUserInteractionPattern = alternativesUnmarshal( + string(CVSS31ModifiedUserInteractionNone), + string(CVSS31ModifiedUserInteractionRequired), + string(CVSS31ModifiedUserInteractionNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31ModifiedUserInteraction) UnmarshalText(data []byte) error { + s, err := cvss31ModifiedUserInteractionPattern(data) + if err == nil { + *e = CVSS31ModifiedUserInteraction(s) + } + return err +} + +// CVSS31PrivilegesRequired represents the privilegesRequiredType in CVSS31. +type CVSS31PrivilegesRequired string + +const ( + // CVSS31PrivilegesRequiredHigh is a constant for "HIGH". + CVSS31PrivilegesRequiredHigh CVSS31PrivilegesRequired = "HIGH" + // CVSS31PrivilegesRequiredLow is a constant for "LOW". + CVSS31PrivilegesRequiredLow CVSS31PrivilegesRequired = "LOW" + // CVSS31PrivilegesRequiredNone is a constant for "NONE". + CVSS31PrivilegesRequiredNone CVSS31PrivilegesRequired = "NONE" +) + +var cvss31PrivilegesRequiredPattern = alternativesUnmarshal( + string(CVSS31PrivilegesRequiredHigh), + string(CVSS31PrivilegesRequiredLow), + string(CVSS31PrivilegesRequiredNone), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31PrivilegesRequired) UnmarshalText(data []byte) error { + s, err := cvss31PrivilegesRequiredPattern(data) + if err == nil { + *e = CVSS31PrivilegesRequired(s) + } + return err +} + +// CVSS31RemediationLevel represents the remediationLevelType in CVSS31. +type CVSS31RemediationLevel string + +const ( + // CVSS31RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX". + CVSS31RemediationLevelOfficialFix CVSS31RemediationLevel = "OFFICIAL_FIX" + // CVSS31RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX". + CVSS31RemediationLevelTemporaryFix CVSS31RemediationLevel = "TEMPORARY_FIX" + // CVSS31RemediationLevelWorkaround is a constant for "WORKAROUND". + CVSS31RemediationLevelWorkaround CVSS31RemediationLevel = "WORKAROUND" + // CVSS31RemediationLevelUnavailable is a constant for "UNAVAILABLE". + CVSS31RemediationLevelUnavailable CVSS31RemediationLevel = "UNAVAILABLE" + // CVSS31RemediationLevelNotDefined is a constant for "NOT_DEFINED". 
+ CVSS31RemediationLevelNotDefined CVSS31RemediationLevel = "NOT_DEFINED" +) + +var cvss31RemediationLevelPattern = alternativesUnmarshal( + string(CVSS31RemediationLevelOfficialFix), + string(CVSS31RemediationLevelTemporaryFix), + string(CVSS31RemediationLevelWorkaround), + string(CVSS31RemediationLevelUnavailable), + string(CVSS31RemediationLevelNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31RemediationLevel) UnmarshalText(data []byte) error { + s, err := cvss31RemediationLevelPattern(data) + if err == nil { + *e = CVSS31RemediationLevel(s) + } + return err +} + +// CVSS31Scope represents the scopeType in CVSS31. +type CVSS31Scope string + +const ( + // CVSS31ScopeUnchanged is a constant for "UNCHANGED". + CVSS31ScopeUnchanged CVSS31Scope = "UNCHANGED" + // CVSS31ScopeChanged is a constant for "CHANGED". + CVSS31ScopeChanged CVSS31Scope = "CHANGED" +) + +var cvss31ScopePattern = alternativesUnmarshal( + string(CVSS31ScopeUnchanged), + string(CVSS31ScopeChanged), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31Scope) UnmarshalText(data []byte) error { + s, err := cvss31ScopePattern(data) + if err == nil { + *e = CVSS31Scope(s) + } + return err +} + +// CVSS31Severity represents the severityType in CVSS31. +type CVSS31Severity string + +const ( + // CVSS31SeverityNone is a constant for "NONE". + CVSS31SeverityNone CVSS31Severity = "NONE" + // CVSS31SeverityLow is a constant for "LOW". + CVSS31SeverityLow CVSS31Severity = "LOW" + // CVSS31SeverityMedium is a constant for "MEDIUM". + CVSS31SeverityMedium CVSS31Severity = "MEDIUM" + // CVSS31SeverityHigh is a constant for "HIGH". + CVSS31SeverityHigh CVSS31Severity = "HIGH" + // CVSS31SeverityCritical is a constant for "CRITICAL". + CVSS31SeverityCritical CVSS31Severity = "CRITICAL" +) + +var cvss31SeverityPattern = alternativesUnmarshal( + string(CVSS31SeverityNone), + string(CVSS31SeverityLow), + string(CVSS31SeverityMedium), + string(CVSS31SeverityHigh), + string(CVSS31SeverityCritical), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31Severity) UnmarshalText(data []byte) error { + s, err := cvss31SeverityPattern(data) + if err == nil { + *e = CVSS31Severity(s) + } + return err +} + +// CVSS31UserInteraction represents the userInteractionType in CVSS31. +type CVSS31UserInteraction string + +const ( + // CVSS31UserInteractionNone is a constant for "NONE". + CVSS31UserInteractionNone CVSS31UserInteraction = "NONE" + // CVSS31UserInteractionRequired is a constant for "REQUIRED". + CVSS31UserInteractionRequired CVSS31UserInteraction = "REQUIRED" +) + +var cvss31UserInteractionPattern = alternativesUnmarshal( + string(CVSS31UserInteractionNone), + string(CVSS31UserInteractionRequired), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS31UserInteraction) UnmarshalText(data []byte) error { + s, err := cvss31UserInteractionPattern(data) + if err == nil { + *e = CVSS31UserInteraction(s) + } + return err +} diff --git a/csaf/doc.go b/csaf/doc.go index 83dd571..92d8ee3 100644 --- a/csaf/doc.go +++ b/csaf/doc.go @@ -8,3 +8,7 @@ // Package csaf contains the core data models used by the csaf distribution. 
package csaf + +//go:generate go run ./generate_cvss_enums.go -o cvss20enums.go -i ./schema/cvss-v2.0.json -p CVSS20 +//go:generate go run ./generate_cvss_enums.go -o cvss30enums.go -i ./schema/cvss-v3.0.json -p CVSS30 +//go:generate go run ./generate_cvss_enums.go -o cvss31enums.go -i ./schema/cvss-v3.1.json -p CVSS31 diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go new file mode 100644 index 0000000..911b64d --- /dev/null +++ b/csaf/generate_cvss_enums.go @@ -0,0 +1,152 @@ +//go:build ignore + +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package main + +import ( + "bytes" + "encoding/json" + "flag" + "go/format" + "log" + "os" + "slices" + "strings" + "text/template" +) + +const tmplText = `// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH +// +// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! + +package csaf + +{{ range $key := .Keys }} +{{ $def := index $.Definitions $key }} +// {{ $type := printf "%s%s" $.Prefix (typename $key) }}{{ $type }} represents the {{ $key }} in {{ $.Prefix }}. +type {{ $type }} string +const ( + {{ range $enum := $def.Enum -}} + // {{ $type}}{{ symbol $enum }} is a constant for "{{ $enum }}". + {{ $type }}{{ symbol $enum }} {{ $type }} = "{{ $enum }}" + {{ end }} +) +var {{ tolower $.Prefix }}{{ typename $key }}Pattern = alternativesUnmarshal( + {{ range $enum := $def.Enum -}} + string({{ $type }}{{ symbol $enum }}), + {{ end }} +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
+func (e *{{ $type }}) UnmarshalText(data []byte) error { + s, err := {{ tolower $.Prefix }}{{ typename $key }}Pattern(data) + if err == nil { + *e = {{ $type }}(s) + } + return err +} +{{ end }} +` + +var tmpl = template.Must(template.New("enums").Funcs(funcs).Parse(tmplText)) + +type definition struct { + Type string `json:"type"` + Enum []string `json:"enum"` +} + +type schema struct { + Definitions map[string]*definition `json:"definitions"` +} + +var funcs = template.FuncMap{ + "tolower": strings.ToLower, + "symbol": func(s string) string { + s = strings.ToLower(s) + s = strings.ReplaceAll(s, "_", " ") + s = strings.Title(s) + s = strings.ReplaceAll(s, " ", "") + return s + }, + "typename": func(s string) string { + if strings.HasSuffix(s, "Type") { + s = s[:len(s)-len("Type")] + } + s = strings.Title(s) + return s + }, +} + +func loadSchema(filename string) (*schema, error) { + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer f.Close() + var s schema + if err := json.NewDecoder(f).Decode(&s); err != nil { + return nil, err + } + return &s, nil +} + +func check(err error) { + if err != nil { + log.Fatalf("error: %v\n", err) + } +} + +func main() { + var ( + input = flag.String("i", "input", "") + output = flag.String("o", "output", "") + prefix = flag.String("p", "prefix", "") + ) + flag.Parse() + if *input == "" { + log.Fatalln("missing schema") + } + if *output == "" { + log.Fatalln("missing output") + } + if *prefix == "" { + log.Fatalln("missing prefix") + } + + s, err := loadSchema(*input) + check(err) + + defs := make([]string, 0, len(s.Definitions)) + for k, v := range s.Definitions { + if v.Type == "string" && len(v.Enum) > 0 { + defs = append(defs, k) + } + } + slices.Sort(defs) + + var source bytes.Buffer + + check(tmpl.Execute(&source, map[string]any{ + "Prefix": *prefix, + "Definitions": s.Definitions, + "Keys": defs, + })) + + formatted, err := format.Source(source.Bytes()) + check(err) + + check(os.WriteFile(*output, formatted, 0644)) +} From f59a8cc7a9eb99e4d2d2a967d1a7b4d9e164bf6c Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Mon, 4 Sep 2023 15:58:28 +0200 Subject: [PATCH 004/235] use generated types --- csaf/advisory.go | 78 ++++++++++++++++++++++++------------------------ 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 601b56e..76019b6 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -658,54 +658,54 @@ type CVSSv2 struct { AccessVector *CVSS20AccessVector `json:"accessVector"` AccessComplexity *CVSS20AccessComplexity `json:"accessComplexity"` Authentication *CVSS20Authentication `json:"authentication"` - ConfidentialityImpact *string `json:"confidentialityImpact"` - IntegrityImpact *string `json:"integrityImpact"` - AvailabilityImpact *string `json:"availabilityImpact"` + ConfidentialityImpact *CVSS20Cia `json:"confidentialityImpact"` + IntegrityImpact *CVSS20Cia `json:"integrityImpact"` + AvailabilityImpact *CVSS20Cia `json:"availabilityImpact"` BaseScore float64 `json:"baseScore"` // required Exploitability *CVSS20Exploitability `json:"exploitability"` RemediationLevel *CVSS20RemediationLevel `json:"remediationLevel"` - ReportConfidence *string `json:"reportConfidence"` - TemporalScore *string `json:"temporalScore"` + ReportConfidence *CVSS20ReportConfidence `json:"reportConfidence"` + TemporalScore *float64 `json:"temporalScore"` CollateralDamagePotential *CVSS20CollateralDamagePotential `json:"collateralDamagePotential"` TargetDistribution *CVSS20TargetDistribution 
`json:"targetDistribution"` - ConfidentialityRequirement *string `json:"confidentialityRequirement"` - IntegrityRequirement *string `json:"integrityRequirement"` - AvailabilityRequirement *string `json:"availabilityRequirement"` - EnvironmentalScore *string `json:"environmentalScore"` + ConfidentialityRequirement *CVSS20CiaRequirement `json:"confidentialityRequirement"` + IntegrityRequirement *CVSS20CiaRequirement `json:"integrityRequirement"` + AvailabilityRequirement *CVSS20CiaRequirement `json:"availabilityRequirement"` + EnvironmentalScore *float64 `json:"environmentalScore"` } // CVSSv3 holding a CVSS v3.x value type CVSSv3 struct { - Version string `json:"version"` // required - VectorString string `json:"vectorString"` // required - AttackVector *string `json:"attackVector"` - AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity"` - PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired"` - UserInteraction *CVSS30UserInteraction `json:"userInteraction"` - Scope *CVSS30Scope `json:"scope"` - ConfidentialityImpact *string `json:"confidentialityImpact"` - IntegrityImpact string `json:"integrityImpact"` - AvailabilityImpact *string `json:"availabilityImpact"` - BaseScore float64 `json:"baseScore"` // required - BaseSeverity string `json:"baseSeverity"` // required - ExploitCodeMaturity *CVSS30ExploitCodeMaturity `json:"exploitCodeMaturity"` - RemediationLevel *CVSS30RemediationLevel `json:"remediationLevel"` - ReportConfidence *string `json:"reportConfidence"` - TemporalScore *string `json:"temporalScore"` - TemporalSeverity *string `json:"temporalSeverity"` - ConfidentialityRequirement *string `json:"confidentialityRequirement"` - IntegrityRequirement *string `json:"integrityRequirement"` - AvailabilityRequirement *string `json:"availabilityRequirement"` - ModifiedAttackVector *CVSS30ModifiedAttackVector `json:"modifiedAttackVector"` - ModifiedAttackComplexity *CVSS30ModifiedAttackComplexity `json:"modifiedAttackComplexity"` - ModifiedPrivilegesRequired *string `json:"modifiedPrivilegesRequired"` - ModifiedUserInteraction *string `json:"modifiedUserInteraction"` - ModifiedScope *string `json:"modifiedScope"` - ModifiedConfidentialityImpact *string `json:"modifiedConfidentialityImpact"` - ModifiedIntegrityImpact *string `json:"modifiedIntegrityImpact"` - ModifiedAvailabilityImpact *string `json:"modifiedAvailabilityImpact"` - EenvironmentalScore *string `json:"environmentalScore"` - EnvironmentalSeverity *string `json:"environmentalSeverity"` + Version string `json:"version"` // required + VectorString string `json:"vectorString"` // required + AttackVector *CVSS30AttackVector `json:"attackVector"` + AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity"` + PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired"` + UserInteraction *CVSS30UserInteraction `json:"userInteraction"` + Scope *CVSS30Scope `json:"scope"` + ConfidentialityImpact *CVSS30Cia `json:"confidentialityImpact"` + IntegrityImpact CVSS30Cia `json:"integrityImpact"` + AvailabilityImpact *CVSS30Cia `json:"availabilityImpact"` + BaseScore float64 `json:"baseScore"` // required + BaseSeverity CVSS30Severity `json:"baseSeverity"` // required + ExploitCodeMaturity *CVSS30ExploitCodeMaturity `json:"exploitCodeMaturity"` + RemediationLevel *CVSS30RemediationLevel `json:"remediationLevel"` + ReportConfidence *CVSS30Confidence `json:"reportConfidence"` + TemporalScore *float64 `json:"temporalScore"` + TemporalSeverity *CVSS30Severity `json:"temporalSeverity"` + 
ConfidentialityRequirement *CVSS30CiaRequirement `json:"confidentialityRequirement"` + IntegrityRequirement *CVSS30CiaRequirement `json:"integrityRequirement"` + AvailabilityRequirement *CVSS30CiaRequirement `json:"availabilityRequirement"` + ModifiedAttackVector *CVSS30ModifiedAttackVector `json:"modifiedAttackVector"` + ModifiedAttackComplexity *CVSS30ModifiedAttackComplexity `json:"modifiedAttackComplexity"` + ModifiedPrivilegesRequired *CVSS30ModifiedPrivilegesRequired `json:"modifiedPrivilegesRequired"` + ModifiedUserInteraction *CVSS30ModifiedUserInteraction `json:"modifiedUserInteraction"` + ModifiedScope *CVSS30ModifiedScope `json:"modifiedScope"` + ModifiedConfidentialityImpact *CVSS30ModifiedCia `json:"modifiedConfidentialityImpact"` + ModifiedIntegrityImpact *CVSS30ModifiedCia `json:"modifiedIntegrityImpact"` + ModifiedAvailabilityImpact *CVSS30ModifiedCia `json:"modifiedAvailabilityImpact"` + EenvironmentalScore *float64 `json:"environmentalScore"` + EnvironmentalSeverity *CVSS30Severity `json:"environmentalSeverity"` } // Score specifies information about (at least one) score of the vulnerability and for which From 4fc2fd9bf2d9a95ce3b21b30e3c1096314283114 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Mon, 4 Sep 2023 16:06:47 +0200 Subject: [PATCH 005/235] added omitempty for cvss structs --- csaf/advisory.go | 84 ++++++++++++++++++++++++------------------------ 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 76019b6..39604a8 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -655,57 +655,57 @@ var CVSSv3VersionPattern = alternativesUnmarshal( type CVSSv2 struct { Version string `json:"version"` // required VectorString string `json:"vectorString"` // required - AccessVector *CVSS20AccessVector `json:"accessVector"` - AccessComplexity *CVSS20AccessComplexity `json:"accessComplexity"` - Authentication *CVSS20Authentication `json:"authentication"` - ConfidentialityImpact *CVSS20Cia `json:"confidentialityImpact"` - IntegrityImpact *CVSS20Cia `json:"integrityImpact"` - AvailabilityImpact *CVSS20Cia `json:"availabilityImpact"` + AccessVector *CVSS20AccessVector `json:"accessVector,omitempty"` + AccessComplexity *CVSS20AccessComplexity `json:"accessComplexity,omitempty"` + Authentication *CVSS20Authentication `json:"authentication,omitempty"` + ConfidentialityImpact *CVSS20Cia `json:"confidentialityImpact,omitempty"` + IntegrityImpact *CVSS20Cia `json:"integrityImpact,omitempty"` + AvailabilityImpact *CVSS20Cia `json:"availabilityImpact,omitempty"` BaseScore float64 `json:"baseScore"` // required - Exploitability *CVSS20Exploitability `json:"exploitability"` - RemediationLevel *CVSS20RemediationLevel `json:"remediationLevel"` - ReportConfidence *CVSS20ReportConfidence `json:"reportConfidence"` - TemporalScore *float64 `json:"temporalScore"` - CollateralDamagePotential *CVSS20CollateralDamagePotential `json:"collateralDamagePotential"` - TargetDistribution *CVSS20TargetDistribution `json:"targetDistribution"` - ConfidentialityRequirement *CVSS20CiaRequirement `json:"confidentialityRequirement"` - IntegrityRequirement *CVSS20CiaRequirement `json:"integrityRequirement"` - AvailabilityRequirement *CVSS20CiaRequirement `json:"availabilityRequirement"` - EnvironmentalScore *float64 `json:"environmentalScore"` + Exploitability *CVSS20Exploitability `json:"exploitability,omitempty"` + RemediationLevel *CVSS20RemediationLevel `json:"remediationLevel,omitempty"` + ReportConfidence *CVSS20ReportConfidence 
`json:"reportConfidence,omitempty"` + TemporalScore *float64 `json:"temporalScore,omitempty"` + CollateralDamagePotential *CVSS20CollateralDamagePotential `json:"collateralDamagePotential,omitempty"` + TargetDistribution *CVSS20TargetDistribution `json:"targetDistribution,omitempty"` + ConfidentialityRequirement *CVSS20CiaRequirement `json:"confidentialityRequirement,omitempty"` + IntegrityRequirement *CVSS20CiaRequirement `json:"integrityRequirement,omitempty"` + AvailabilityRequirement *CVSS20CiaRequirement `json:"availabilityRequirement,omitempty"` + EnvironmentalScore *float64 `json:"environmentalScore,omitempty"` } // CVSSv3 holding a CVSS v3.x value type CVSSv3 struct { Version string `json:"version"` // required VectorString string `json:"vectorString"` // required - AttackVector *CVSS30AttackVector `json:"attackVector"` - AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity"` - PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired"` - UserInteraction *CVSS30UserInteraction `json:"userInteraction"` - Scope *CVSS30Scope `json:"scope"` - ConfidentialityImpact *CVSS30Cia `json:"confidentialityImpact"` - IntegrityImpact CVSS30Cia `json:"integrityImpact"` - AvailabilityImpact *CVSS30Cia `json:"availabilityImpact"` + AttackVector *CVSS30AttackVector `json:"attackVector,omitempty"` + AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity,omitempty"` + PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired,omitempty"` + UserInteraction *CVSS30UserInteraction `json:"userInteraction,omitempty"` + Scope *CVSS30Scope `json:"scope,omitempty"` + ConfidentialityImpact *CVSS30Cia `json:"confidentialityImpact,omitempty"` + IntegrityImpact CVSS30Cia `json:"integrityImpact,omitempty"` + AvailabilityImpact *CVSS30Cia `json:"availabilityImpact,omitempty"` BaseScore float64 `json:"baseScore"` // required BaseSeverity CVSS30Severity `json:"baseSeverity"` // required - ExploitCodeMaturity *CVSS30ExploitCodeMaturity `json:"exploitCodeMaturity"` - RemediationLevel *CVSS30RemediationLevel `json:"remediationLevel"` - ReportConfidence *CVSS30Confidence `json:"reportConfidence"` - TemporalScore *float64 `json:"temporalScore"` - TemporalSeverity *CVSS30Severity `json:"temporalSeverity"` - ConfidentialityRequirement *CVSS30CiaRequirement `json:"confidentialityRequirement"` - IntegrityRequirement *CVSS30CiaRequirement `json:"integrityRequirement"` - AvailabilityRequirement *CVSS30CiaRequirement `json:"availabilityRequirement"` - ModifiedAttackVector *CVSS30ModifiedAttackVector `json:"modifiedAttackVector"` - ModifiedAttackComplexity *CVSS30ModifiedAttackComplexity `json:"modifiedAttackComplexity"` - ModifiedPrivilegesRequired *CVSS30ModifiedPrivilegesRequired `json:"modifiedPrivilegesRequired"` - ModifiedUserInteraction *CVSS30ModifiedUserInteraction `json:"modifiedUserInteraction"` - ModifiedScope *CVSS30ModifiedScope `json:"modifiedScope"` - ModifiedConfidentialityImpact *CVSS30ModifiedCia `json:"modifiedConfidentialityImpact"` - ModifiedIntegrityImpact *CVSS30ModifiedCia `json:"modifiedIntegrityImpact"` - ModifiedAvailabilityImpact *CVSS30ModifiedCia `json:"modifiedAvailabilityImpact"` - EenvironmentalScore *float64 `json:"environmentalScore"` - EnvironmentalSeverity *CVSS30Severity `json:"environmentalSeverity"` + ExploitCodeMaturity *CVSS30ExploitCodeMaturity `json:"exploitCodeMaturity,omitempty"` + RemediationLevel *CVSS30RemediationLevel `json:"remediationLevel,omitempty"` + ReportConfidence *CVSS30Confidence `json:"reportConfidence,omitempty"` + 
TemporalScore *float64 `json:"temporalScore,omitempty"` + TemporalSeverity *CVSS30Severity `json:"temporalSeverity,omitempty"` + ConfidentialityRequirement *CVSS30CiaRequirement `json:"confidentialityRequirement,omitempty"` + IntegrityRequirement *CVSS30CiaRequirement `json:"integrityRequirement,omitempty"` + AvailabilityRequirement *CVSS30CiaRequirement `json:"availabilityRequirement,omitempty"` + ModifiedAttackVector *CVSS30ModifiedAttackVector `json:"modifiedAttackVector,omitempty"` + ModifiedAttackComplexity *CVSS30ModifiedAttackComplexity `json:"modifiedAttackComplexity,omitempty"` + ModifiedPrivilegesRequired *CVSS30ModifiedPrivilegesRequired `json:"modifiedPrivilegesRequired,omitempty"` + ModifiedUserInteraction *CVSS30ModifiedUserInteraction `json:"modifiedUserInteraction,omitempty"` + ModifiedScope *CVSS30ModifiedScope `json:"modifiedScope,omitempty"` + ModifiedConfidentialityImpact *CVSS30ModifiedCia `json:"modifiedConfidentialityImpact,omitempty"` + ModifiedIntegrityImpact *CVSS30ModifiedCia `json:"modifiedIntegrityImpact,omitempty"` + ModifiedAvailabilityImpact *CVSS30ModifiedCia `json:"modifiedAvailabilityImpact,omitempty"` + EenvironmentalScore *float64 `json:"environmentalScore,omitempty"` + EnvironmentalSeverity *CVSS30Severity `json:"environmentalSeverity,omitempty"` } // Score specifies information about (at least one) score of the vulnerability and for which From 3acabdf73b857dbc0bf1d873df7c915d8f4e9905 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Mon, 4 Sep 2023 17:41:44 +0200 Subject: [PATCH 006/235] reusing TLPLabel and Category from models.go --- csaf/advisory.go | 78 +++++------------------------------------------- 1 file changed, 7 insertions(+), 71 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 39604a8..3772db6 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -222,30 +222,10 @@ const CSAFVersion20 CSAFVersion = "2.0" var CSAFVersionPattern = alternativesUnmarshal(string(CSAFVersion20)) -// DocumentTLPLabel is the label of a TLP. -type DocumentTLPLabel string - -const ( - // DocumentTLPLabelAmber is the "AMBER" label. - DocumentTLPLabelAmber DocumentTLPLabel = "AMBER" - // DocumentTLPLabelGreen is the "GREEN" label. - DocumentTLPLabelGreen DocumentTLPLabel = "GREEN" - // DocumentTLPLabelRed is the "RED" label. - DocumentTLPLabelRed DocumentTLPLabel = "RED" - // DocumentTLPLabelWhite is the "WHITE" label. - DocumentTLPLabelWhite DocumentTLPLabel = "WHITE" -) - -var csafDocumentTLPLabelPattern = alternativesUnmarshal( - string(DocumentTLPLabelAmber), - string(DocumentTLPLabelGreen), - string(DocumentTLPLabelRed), - string(DocumentTLPLabelWhite)) - // TLP provides details about the TLP classification of the document. type TLP struct { - DocumentTLPLabel DocumentTLPLabel `json:"label"` // required - URL *string `json:"url,omitempty"` + DocumentTLPLabel TLPLabel `json:"label"` // required + URL *string `json:"url,omitempty"` } // DocumentDistribution describes rules for sharing a document. @@ -254,39 +234,13 @@ type DocumentDistribution struct { TLP *TLP `json:"tlp,omitempty"` } -// PublisherCategory is the category of a publisher. -type PublisherCategory string - -const ( - // CSAFPublisherCategoryCoordinator is the "coordinator" category. - CSAFPublisherCategoryCoordinator PublisherCategory = "coordinator" - // CSAFPublisherCategoryDiscoverer is the "discoverer" category. - CSAFPublisherCategoryDiscoverer PublisherCategory = "discoverer" - // CSAFPublisherCategoryOther is the "other" category. 
- CSAFPublisherCategoryOther PublisherCategory = "other" - // CSAFPublisherCategoryTranslator is the "translator" category. - CSAFPublisherCategoryTranslator PublisherCategory = "translator" - // CSAFPublisherCategoryUser is the "user" category. - CSAFPublisherCategoryUser PublisherCategory = "user" - // CSAFPublisherCategoryVendor is the "vendor" category. - CSAFPublisherCategoryVendor PublisherCategory = "vendor" -) - -var csafPublisherCategoryPattern = alternativesUnmarshal( - string(CSAFPublisherCategoryCoordinator), - string(CSAFPublisherCategoryDiscoverer), - string(CSAFPublisherCategoryOther), - string(CSAFPublisherCategoryTranslator), - string(CSAFPublisherCategoryUser), - string(CSAFPublisherCategoryVendor)) - // DocumentPublisher provides information about the publishing entity. type DocumentPublisher struct { - Category PublisherCategory `json:"category"` // required - ContactDetails *string `json:"contact_details,omitempty"` - IssuingAuthority *string `json:"issuing_authority,omitempty"` - Name string `json:"name"` // required - Namespace string `json:"namespace"` // required + Category Category `json:"category"` // required + ContactDetails *string `json:"contact_details,omitempty"` + IssuingAuthority *string `json:"issuing_authority,omitempty"` + Name string `json:"name"` // required + Namespace string `json:"namespace"` // required } // The version specifies a version string to denote clearly the evolution of the content of the document. @@ -836,24 +790,6 @@ func (rc *ReferenceCategory) UnmarshalText(data []byte) error { return err } -// UnmarshalText implements the encoding.TextUnmarshaller interface. -func (dtl *DocumentTLPLabel) UnmarshalText(data []byte) error { - s, err := csafDocumentTLPLabelPattern(data) - if err == nil { - *dtl = DocumentTLPLabel(s) - } - return err -} - -// UnmarshalText implements the encoding.TextUnmarshaller interface. -func (pc *PublisherCategory) UnmarshalText(data []byte) error { - s, err := csafPublisherCategoryPattern(data) - if err == nil { - *pc = PublisherCategory(s) - } - return err -} - // UnmarshalText implements the encoding.TextUnmarshaller interface. func (ts *TrackingStatus) UnmarshalText(data []byte) error { s, err := csafTrackingStatusPattern(data) From 7f36ecb48c338e4c6a42ed73d435cce0fe986e77 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Tue, 5 Sep 2023 16:41:52 +0200 Subject: [PATCH 007/235] added missing types for CVSS2 + changed variable names for more consistency --- csaf/advisory.go | 87 ++++++++++++++++++++++++++++++++---------------- 1 file changed, 59 insertions(+), 28 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 3772db6..b25a5f7 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -587,28 +587,41 @@ type Remediation struct { URL *string `json:"url,omitempty"` } -// CVSSv3Version is the version of a CVSSv3 item. -type CVSSv3Version string +// CVSSVersion2 is the version of a CVSS2 item. +type CVSSVersion2 string -// CVSSv3Version30 is version 3.0 of a CVSSv3 item. -const CVSSv3Version30 CVSSv3Version = "3.0" +// MetadataVersion20 is the current version of the schema. +const CVSSVersion20 CVSSVersion2 = "2.0" -// CVSSv3Version31 is version 3.1 of a CVSSv3 item. -const CVSSv3Version31 CVSSv3Version = "3.1" +var CVSSVersion2Pattern = alternativesUnmarshal(string(CVSSVersion20)) -// CVSSv3VectorString is the VectorString of a CVSSv3 item with version 3.x. -type CVSSv3VectorString string +// CVSS2VectorString is the VectorString of a CVSS2 item with version 3.x. 
+type CVSS2VectorString string -var CVSSv3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$`) +var CVSS2VectorStringPattern = patternUnmarshal(`^((AV:[NAL]|AC:[LMH]|Au:[MSN]|[CIA]:[NPC]|E:(U|POC|F|H|ND)|RL:(OF|TF|W|U|ND)|RC:(UC|UR|C|ND)|CDP:(N|L|LM|MH|H|ND)|TD:(N|L|M|H|ND)|[CIA]R:(L|M|H|ND))/)*(AV:[NAL]|AC:[LMH]|Au:[MSN]|[CIA]:[NPC]|E:(U|POC|F|H|ND)|RL:(OF|TF|W|U|ND)|RC:(UC|UR|C|ND)|CDP:(N|L|LM|MH|H|ND)|TD:(N|L|M|H|ND)|[CIA]R:(L|M|H|ND))$`) -var CVSSv3VersionPattern = alternativesUnmarshal( - string(CVSSv3Version30), - string(CVSSv3Version31)) +// CVSSVersion3 is the version of a CVSS3 item. +type CVSSVersion3 string -// CVSSv2 holding a CVSS v2.0 value -type CVSSv2 struct { - Version string `json:"version"` // required - VectorString string `json:"vectorString"` // required +// CVSS3Version30 is version 3.0 of a CVSS3 item. +const CVSSVersion30 CVSSVersion3 = "3.0" + +// CVSSVersion31 is version 3.1 of a CVSS3 item. +const CVSSVersion31 CVSSVersion3 = "3.1" + +var CVSS3VersionPattern = alternativesUnmarshal( + string(CVSSVersion30), + string(CVSSVersion31)) + +// CVSS3VectorString is the VectorString of a CVSS3 item with version 3.x. +type CVSS3VectorString string + +var CVSS3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$`) + +// CVSS2 holding a CVSS v2.0 value +type CVSS2 struct { + Version CVSSVersion2 `json:"version"` // required + VectorString CVSS2VectorString `json:"vectorString"` // required AccessVector *CVSS20AccessVector `json:"accessVector,omitempty"` AccessComplexity *CVSS20AccessComplexity `json:"accessComplexity,omitempty"` Authentication *CVSS20Authentication `json:"authentication,omitempty"` @@ -628,10 +641,10 @@ type CVSSv2 struct { EnvironmentalScore *float64 `json:"environmentalScore,omitempty"` } -// CVSSv3 holding a CVSS v3.x value -type CVSSv3 struct { - Version string `json:"version"` // required - VectorString string `json:"vectorString"` // required +// CVSS3 holding a CVSS v3.x value +type CVSS3 struct { + Version CVSSVersion3 `json:"version"` // required + VectorString CVSS3VectorString `json:"vectorString"` // required AttackVector *CVSS30AttackVector `json:"attackVector,omitempty"` AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity,omitempty"` PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired,omitempty"` @@ -665,8 +678,8 @@ type CVSSv3 struct { // Score specifies information about (at least one) score of the vulnerability and for which // products the given value applies. A Score item has at least 2 properties. 
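The vectorString grammar is the same for CVSS 3.0 and 3.1, so a single regexp with a CVSS:3[.][01] prefix covers both versions, while CVSS v2 vectors carry no such prefix at all. A trimmed-down, self-contained sketch of that shape (only the version prefix and the first two metrics; the full CVSS3VectorStringPattern above additionally enumerates every metric via alternation, in any order):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Trimmed-down shape of the pattern: a shared "CVSS:3.x" prefix followed by
	// slash-separated metrics. The full pattern lists all metrics via alternation.
	p := regexp.MustCompile(`^CVSS:3[.][01]/AV:[NALP]/AC:[LH]/`)

	fmt.Println(p.MatchString("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H")) // true
	fmt.Println(p.MatchString("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H")) // true
	fmt.Println(p.MatchString("AV:N/AC:L/Au:N/C:P/I:P/A:P"))                   // false: v2 vectors have no version prefix
}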
type Score struct { - CVSSv2 *CVSSv2 `json:"cvss_v2,omitempty"` - CVSSv3 *CVSSv3 `json:"cvss_v3,omitempty"` + CVSS2 *CVSS2 `json:"cvss_v2,omitempty"` + CVSS3 *CVSS3 `json:"cvss_v3,omitempty"` Products *Products `json:"products"` // required } @@ -953,19 +966,37 @@ func (wi *WeaknessID) UnmarshalText(data []byte) error { } // UnmarshalText implements the encoding.TextUnmarshaller interface. -func (cv *CVSSv3Version) UnmarshalText(data []byte) error { - s, err := CVSSv3VersionPattern(data) +func (cv *CVSSVersion2) UnmarshalText(data []byte) error { + s, err := CVSSVersion2Pattern(data) if err == nil { - *cv = CVSSv3Version(s) + *cv = CVSSVersion2(s) } return err } // UnmarshalText implements the encoding.TextUnmarshaller interface. -func (cvs *CVSSv3VectorString) UnmarshalText(data []byte) error { - s, err := CVSSv3VectorStringPattern(data) +func (cvs *CVSS2VectorString) UnmarshalText(data []byte) error { + s, err := CVSS2VectorStringPattern(data) if err == nil { - *cvs = CVSSv3VectorString(s) + *cvs = CVSS2VectorString(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cv *CVSSVersion3) UnmarshalText(data []byte) error { + s, err := CVSS3VersionPattern(data) + if err == nil { + *cv = CVSSVersion3(s) + } + return err +} + +// UnmarshalText implements the encoding.TextUnmarshaller interface. +func (cvs *CVSS3VectorString) UnmarshalText(data []byte) error { + s, err := CVSS3VectorStringPattern(data) + if err == nil { + *cvs = CVSS3VectorString(s) } return err } From 22ef2a925ecc93cc63ac467c076f90d822ce19d1 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Tue, 5 Sep 2023 19:14:57 +0200 Subject: [PATCH 008/235] Unexport patterns --- csaf/advisory.go | 92 ++++++++++++++++++++++++------------------------ 1 file changed, 46 insertions(+), 46 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index b25a5f7..a55e29e 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -78,7 +78,7 @@ type Products []ProductID // FileHashValue represents the value of a hash. type FileHashValue string -var FileHashValuePattern = patternUnmarshal(`^[0-9a-fA-F]{32,}$`) +var fileHashValuePattern = patternUnmarshal(`^[0-9a-fA-F]{32,}$`) // FileHash is checksum hash. // Values for 'algorithm' are derived from the currently supported digests OpenSSL. Leading dashes were removed. @@ -96,12 +96,12 @@ type Hashes struct { // CPE represents a Common Platform Enumeration in an advisory. type CPE string -var CPEPattern = patternUnmarshal("^(cpe:2\\.3:[aho\\*\\-](:(((\\?*|\\*?)([a-zA-Z0-9\\-\\._]|(\\\\[\\\\\\*\\?!\"#\\$%&'\\(\\)\\+,/:;<=>@\\[\\]\\^`\\{\\|\\}~]))+(\\?*|\\*?))|[\\*\\-])){5}(:(([a-zA-Z]{2,3}(-([a-zA-Z]{2}|[0-9]{3}))?)|[\\*\\-]))(:(((\\?*|\\*?)([a-zA-Z0-9\\-\\._]|(\\\\[\\\\\\*\\?!\"#\\$%&'\\(\\)\\+,/:;<=>@\\[\\]\\^`\\{\\|\\}~]))+(\\?*|\\*?))|[\\*\\-])){4})|([c][pP][eE]:/[AHOaho]?(:[A-Za-z0-9\\._\\-~%]*){0,6})$") +var cpePattern = patternUnmarshal("^(cpe:2\\.3:[aho\\*\\-](:(((\\?*|\\*?)([a-zA-Z0-9\\-\\._]|(\\\\[\\\\\\*\\?!\"#\\$%&'\\(\\)\\+,/:;<=>@\\[\\]\\^`\\{\\|\\}~]))+(\\?*|\\*?))|[\\*\\-])){5}(:(([a-zA-Z]{2,3}(-([a-zA-Z]{2}|[0-9]{3}))?)|[\\*\\-]))(:(((\\?*|\\*?)([a-zA-Z0-9\\-\\._]|(\\\\[\\\\\\*\\?!\"#\\$%&'\\(\\)\\+,/:;<=>@\\[\\]\\^`\\{\\|\\}~]))+(\\?*|\\*?))|[\\*\\-])){4})|([c][pP][eE]:/[AHOaho]?(:[A-Za-z0-9\\._\\-~%]*){0,6})$") // PURL represents a package URL in an advisory. 
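The patternUnmarshal and alternativesUnmarshal helpers that produce these (now unexported) package variables are not part of this patch series; judging from how they are called, each returns a closure that validates raw text (against a regexp, or against a fixed list of allowed values in the alternativesUnmarshal case) and hands back the accepted string. A plausible, self-contained sketch of that idea, wired into an UnmarshalText method the same way the patch does for TrackingID (the helper body here is an assumption; the real implementation in the repository may differ):

package main

import (
	"encoding/json"
	"fmt"
	"regexp"
)

// patternUnmarshal returns a validator for text matching the given regexp.
// Sketch only: the name and signature follow the calls in the patch, the body is assumed.
func patternUnmarshal(pattern string) func([]byte) (string, error) {
	re := regexp.MustCompile(pattern)
	return func(data []byte) (string, error) {
		if !re.Match(data) {
			return "", fmt.Errorf("%q does not match %q", data, pattern)
		}
		return string(data), nil
	}
}

// trackingID mirrors how the package wires such a pattern into UnmarshalText,
// which encoding/json calls automatically when decoding a JSON string.
type trackingID string

var trackingIDPattern = patternUnmarshal(`^[\S](.*[\S])?$`)

func (ti *trackingID) UnmarshalText(data []byte) error {
	s, err := trackingIDPattern(data)
	if err == nil {
		*ti = trackingID(s)
	}
	return err
}

func main() {
	var good, bad trackingID
	fmt.Println(json.Unmarshal([]byte(`"example-2023-0001"`), &good)) // <nil>
	fmt.Println(json.Unmarshal([]byte(`"   "`), &bad))               // validation error: only whitespace
}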
type PURL string -var PURLPattern = patternUnmarshal(`^pkg:[A-Za-z\\.\\-\\+][A-Za-z0-9\\.\\-\\+]*/.+`) +var pURLPattern = patternUnmarshal(`^pkg:[A-Za-z\\.\\-\\+][A-Za-z0-9\\.\\-\\+]*/.+`) // XGenericURI represents an identifier for a product. type XGenericURI struct { @@ -212,15 +212,15 @@ type AggregateSeverity struct { // DocumentCategory represents a category of a document. type DocumentCategory string -var DocumentCategoryPattern = patternUnmarshal(`^[^\\s\\-_\\.](.*[^\\s\\-_\\.])?$`) +var documentCategoryPattern = patternUnmarshal(`^[^\\s\\-_\\.](.*[^\\s\\-_\\.])?$`) -// CSAFVersion is the version of a document. -type CSAFVersion string +// Version is the version of a document. +type Version string -// CSAFVersion is the current version of CSAF. -const CSAFVersion20 CSAFVersion = "2.0" +// CSAFVersion20 is the current version of CSAF. +const CSAFVersion20 Version = "2.0" -var CSAFVersionPattern = alternativesUnmarshal(string(CSAFVersion20)) +var csafVersionPattern = alternativesUnmarshal(string(CSAFVersion20)) // TLP provides details about the TLP classification of the document. type TLP struct { @@ -243,10 +243,10 @@ type DocumentPublisher struct { Namespace string `json:"namespace"` // required } -// The version specifies a version string to denote clearly the evolution of the content of the document. -type Version string +// RevisionNumber specifies a version string to denote clearly the evolution of the content of the document. +type RevisionNumber string -var VersionPattern = patternUnmarshal(`^(0|[1-9][0-9]*)$|^((0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*) +var versionPattern = patternUnmarshal(`^(0|[1-9][0-9]*)$|^((0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*) (?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)$`) // Engine contains information about the engine that generated the CSAF document. @@ -266,14 +266,14 @@ type Generator struct { // TrackingID is a unique identifier for the document. type TrackingID string -var TrackingIDPattern = patternUnmarshal("^[\\S](.*[\\S])?$") +var trackingIDPattern = patternUnmarshal("^[\\S](.*[\\S])?$") // Revision contains information about one revision of the document. type Revision struct { - Date string `json:"date"` // required - LegacyVersion *string `json:"legacy_version,omitempty"` - Number Version `json:"number"` // required - Summary string `json:"summary"` // required + Date string `json:"date"` // required + LegacyVersion *string `json:"legacy_version,omitempty"` + Number RevisionNumber `json:"number"` // required + Summary string `json:"summary"` // required } // TrackingStatus is the category of a publisher. @@ -302,20 +302,20 @@ type Tracking struct { InitialReleaseDate string `json:"initial_release_date"` // required RevisionHistory []Revision `json:"revision_history"` // required Status TrackingStatus `json:"status"` // required - Version Version `json:"version"` // required + Version RevisionNumber `json:"version"` // required } // Lang is a language identifier, corresponding to IETF BCP 47 / RFC 5646. 
type Lang string -var LangPattern = patternUnmarshal("^(([A-Za-z]{2,3}(-[A-Za-z]{3}(-[A-Za-z]{3}){0,2})?|[A-Za-z]{4,8})(-[A-Za-z]{4})?(-([A-Za-z]{2}|[0-9]{3}))?(-([A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(-[A-WY-Za-wy-z0-9](-[A-Za-z0-9]{2,8})+)*(-[Xx](-[A-Za-z0-9]{1,8})+)?|[Xx](-[A-Za-z0-9]{1,8})+|[Ii]-[Dd][Ee][Ff][Aa][Uu][Ll][Tt]|[Ii]-[Mm][Ii][Nn][Gg][Oo])$") +var langPattern = patternUnmarshal("^(([A-Za-z]{2,3}(-[A-Za-z]{3}(-[A-Za-z]{3}){0,2})?|[A-Za-z]{4,8})(-[A-Za-z]{4})?(-([A-Za-z]{2}|[0-9]{3}))?(-([A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(-[A-WY-Za-wy-z0-9](-[A-Za-z0-9]{2,8})+)*(-[Xx](-[A-Za-z0-9]{1,8})+)?|[Xx](-[A-Za-z0-9]{1,8})+|[Ii]-[Dd][Ee][Ff][Aa][Uu][Ll][Tt]|[Ii]-[Mm][Ii][Nn][Gg][Oo])$") // Document contains meta-data about an advisory. type Document struct { Acknowledgements []Acknowledgement `json:"acknowledgements,omitempty"` AggregateSeverity *AggregateSeverity `json:"aggregate_severity,omitempty"` Category DocumentCategory `json:"category"` // required - CSAFVersion CSAFVersion `json:"csaf_version"` // required + CSAFVersion Version `json:"csaf_version"` // required Distribution *DocumentDistribution `json:"distribution,omitempty"` Lang *Lang `json:"lang,omitempty"` Notes []*Note `json:"notes,omitempty"` @@ -384,12 +384,12 @@ type ProductTree struct { // CVE holds the MITRE standard Common Vulnerabilities and Exposures (CVE) tracking number for a vulnerability. type CVE string -var CVEPattern = patternUnmarshal("^CVE-[0-9]{4}-[0-9]{4,}$") +var cvePattern = patternUnmarshal("^CVE-[0-9]{4}-[0-9]{4,}$") // WeaknessID is the identifier of a weakness. type WeaknessID string -var WeaknessIDPattern = patternUnmarshal("^CWE-[1-9]\\d{0,5}$") +var weaknessIDPattern = patternUnmarshal("^CWE-[1-9]\\d{0,5}$") // CWE holds the MITRE standard Common Weakness Enumeration (CWE) for the weakness associated. type CWE struct { @@ -590,33 +590,33 @@ type Remediation struct { // CVSSVersion2 is the version of a CVSS2 item. type CVSSVersion2 string -// MetadataVersion20 is the current version of the schema. +// CVSSVersion20 is the current version of the schema. const CVSSVersion20 CVSSVersion2 = "2.0" -var CVSSVersion2Pattern = alternativesUnmarshal(string(CVSSVersion20)) +var cvssVersion2Pattern = alternativesUnmarshal(string(CVSSVersion20)) // CVSS2VectorString is the VectorString of a CVSS2 item with version 3.x. type CVSS2VectorString string -var CVSS2VectorStringPattern = patternUnmarshal(`^((AV:[NAL]|AC:[LMH]|Au:[MSN]|[CIA]:[NPC]|E:(U|POC|F|H|ND)|RL:(OF|TF|W|U|ND)|RC:(UC|UR|C|ND)|CDP:(N|L|LM|MH|H|ND)|TD:(N|L|M|H|ND)|[CIA]R:(L|M|H|ND))/)*(AV:[NAL]|AC:[LMH]|Au:[MSN]|[CIA]:[NPC]|E:(U|POC|F|H|ND)|RL:(OF|TF|W|U|ND)|RC:(UC|UR|C|ND)|CDP:(N|L|LM|MH|H|ND)|TD:(N|L|M|H|ND)|[CIA]R:(L|M|H|ND))$`) +var cvss2VectorStringPattern = patternUnmarshal(`^((AV:[NAL]|AC:[LMH]|Au:[MSN]|[CIA]:[NPC]|E:(U|POC|F|H|ND)|RL:(OF|TF|W|U|ND)|RC:(UC|UR|C|ND)|CDP:(N|L|LM|MH|H|ND)|TD:(N|L|M|H|ND)|[CIA]R:(L|M|H|ND))/)*(AV:[NAL]|AC:[LMH]|Au:[MSN]|[CIA]:[NPC]|E:(U|POC|F|H|ND)|RL:(OF|TF|W|U|ND)|RC:(UC|UR|C|ND)|CDP:(N|L|LM|MH|H|ND)|TD:(N|L|M|H|ND)|[CIA]R:(L|M|H|ND))$`) // CVSSVersion3 is the version of a CVSS3 item. type CVSSVersion3 string -// CVSS3Version30 is version 3.0 of a CVSS3 item. +// CVSSVersion30 is version 3.0 of a CVSS3 item. const CVSSVersion30 CVSSVersion3 = "3.0" // CVSSVersion31 is version 3.1 of a CVSS3 item. 
const CVSSVersion31 CVSSVersion3 = "3.1" -var CVSS3VersionPattern = alternativesUnmarshal( +var cvss3VersionPattern = alternativesUnmarshal( string(CVSSVersion30), string(CVSSVersion31)) // CVSS3VectorString is the VectorString of a CVSS3 item with version 3.x. type CVSS3VectorString string -var CVSS3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$`) +var cvss3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$`) // CVSS2 holding a CVSS v2.0 value type CVSS2 struct { @@ -877,7 +877,7 @@ func (tc *ThreatCategory) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (cpe *CPE) UnmarshalText(data []byte) error { - s, err := CPEPattern(data) + s, err := cpePattern(data) if err == nil { *cpe = CPE(s) } @@ -886,7 +886,7 @@ func (cpe *CPE) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (fhv *FileHashValue) UnmarshalText(data []byte) error { - s, err := FileHashValuePattern(data) + s, err := fileHashValuePattern(data) if err == nil { *fhv = FileHashValue(s) } @@ -895,7 +895,7 @@ func (fhv *FileHashValue) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (p *PURL) UnmarshalText(data []byte) error { - s, err := PURLPattern(data) + s, err := pURLPattern(data) if err == nil { *p = PURL(s) } @@ -904,7 +904,7 @@ func (p *PURL) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (l *Lang) UnmarshalText(data []byte) error { - s, err := LangPattern(data) + s, err := langPattern(data) if err == nil { *l = Lang(s) } @@ -912,17 +912,17 @@ func (l *Lang) UnmarshalText(data []byte) error { } // UnmarshalText implements the encoding.TextUnmarshaller interface. -func (v *Version) UnmarshalText(data []byte) error { - s, err := VersionPattern(data) +func (v *RevisionNumber) UnmarshalText(data []byte) error { + s, err := versionPattern(data) if err == nil { - *v = Version(s) + *v = RevisionNumber(s) } return err } // UnmarshalText implements the encoding.TextUnmarshaller interface. func (dc *DocumentCategory) UnmarshalText(data []byte) error { - s, err := DocumentCategoryPattern(data) + s, err := documentCategoryPattern(data) if err == nil { *dc = DocumentCategory(s) } @@ -930,17 +930,17 @@ func (dc *DocumentCategory) UnmarshalText(data []byte) error { } // UnmarshalText implements the encoding.TextUnmarshaller interface. -func (cv *CSAFVersion) UnmarshalText(data []byte) error { - s, err := CSAFVersionPattern(data) +func (cv *Version) UnmarshalText(data []byte) error { + s, err := csafVersionPattern(data) if err == nil { - *cv = CSAFVersion(s) + *cv = Version(s) } return err } // UnmarshalText implements the encoding.TextUnmarshaller interface. 
func (ti *TrackingID) UnmarshalText(data []byte) error { - s, err := TrackingIDPattern(data) + s, err := trackingIDPattern(data) if err == nil { *ti = TrackingID(s) } @@ -949,7 +949,7 @@ func (ti *TrackingID) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (cve *CVE) UnmarshalText(data []byte) error { - s, err := CVEPattern(data) + s, err := cvePattern(data) if err == nil { *cve = CVE(s) } @@ -958,7 +958,7 @@ func (cve *CVE) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (wi *WeaknessID) UnmarshalText(data []byte) error { - s, err := WeaknessIDPattern(data) + s, err := weaknessIDPattern(data) if err == nil { *wi = WeaknessID(s) } @@ -967,7 +967,7 @@ func (wi *WeaknessID) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (cv *CVSSVersion2) UnmarshalText(data []byte) error { - s, err := CVSSVersion2Pattern(data) + s, err := cvssVersion2Pattern(data) if err == nil { *cv = CVSSVersion2(s) } @@ -976,7 +976,7 @@ func (cv *CVSSVersion2) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (cvs *CVSS2VectorString) UnmarshalText(data []byte) error { - s, err := CVSS2VectorStringPattern(data) + s, err := cvss2VectorStringPattern(data) if err == nil { *cvs = CVSS2VectorString(s) } @@ -985,7 +985,7 @@ func (cvs *CVSS2VectorString) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (cv *CVSSVersion3) UnmarshalText(data []byte) error { - s, err := CVSS3VersionPattern(data) + s, err := cvss3VersionPattern(data) if err == nil { *cv = CVSSVersion3(s) } @@ -994,7 +994,7 @@ func (cv *CVSSVersion3) UnmarshalText(data []byte) error { // UnmarshalText implements the encoding.TextUnmarshaller interface. func (cvs *CVSS3VectorString) UnmarshalText(data []byte) error { - s, err := CVSS3VectorStringPattern(data) + s, err := cvss3VectorStringPattern(data) if err == nil { *cvs = CVSS3VectorString(s) } From f45d273af928d566950865d6664f3d8e2e67d0cd Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Tue, 5 Sep 2023 19:53:48 +0200 Subject: [PATCH 009/235] fixed versionPattern --- csaf/advisory.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index a55e29e..4732ed3 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -246,8 +246,7 @@ type DocumentPublisher struct { // RevisionNumber specifies a version string to denote clearly the evolution of the content of the document. type RevisionNumber string -var versionPattern = patternUnmarshal(`^(0|[1-9][0-9]*)$|^((0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*) - (?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)$`) +var versionPattern = patternUnmarshal("^(0|[1-9][0-9]*)$|^((0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)$") // Engine contains information about the engine that generated the CSAF document. 
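The versionPattern fix above is mostly about Go string escaping: inside a backtick raw string, \\d reaches the regexp engine as two characters and means "a literal backslash followed by the letter d", while in an interpreted double-quoted string "\\d" is unescaped to \d, the digit class the semver pattern needs; the old raw string also pulled its line break and indentation into the pattern. A small, self-contained illustration of the escaping difference:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// In a raw (backtick) string the two characters \\ reach the regexp engine
	// unchanged, so this pattern means "a literal backslash, then one or more d".
	rawString := regexp.MustCompile(`^\\d+$`)

	// In an interpreted (double-quoted) string "\\d" becomes \d before the regexp
	// engine sees it, so this pattern means "one or more digits".
	interpreted := regexp.MustCompile("^\\d+$")

	fmt.Println(rawString.MatchString("123"))   // false
	fmt.Println(interpreted.MatchString("123")) // true
	fmt.Println(rawString.MatchString(`\d`))    // true: a backslash followed by the letter d
}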
type Engine struct { From b03df5508a717e977155e49c9db91aa5a59ebc80 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Wed, 6 Sep 2023 15:44:56 +0200 Subject: [PATCH 010/235] added explanation for cvss3VectorStringPattern --- csaf/advisory.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/csaf/advisory.go b/csaf/advisory.go index 4732ed3..16d3503 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -615,6 +615,8 @@ var cvss3VersionPattern = alternativesUnmarshal( // CVSS3VectorString is the VectorString of a CVSS3 item with version 3.x. type CVSS3VectorString string +// cvss3VectorStringPattern is a combination of the vectorString patterns of CVSS 3.0 +// and CVSS 3.1 since the only difference is the number directly after the first dot. var cvss3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$`) // CVSS2 holding a CVSS v2.0 value From 4206c2e4b370899ca1d313f67c79f5e053fd96d8 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Wed, 6 Sep 2023 15:51:47 +0200 Subject: [PATCH 011/235] only using enums from CVSS 3.0 --- csaf/advisory.go | 60 +++--- csaf/cvss30enums.go | 500 -------------------------------------------- csaf/cvss31enums.go | 500 -------------------------------------------- csaf/cvss3enums.go | 500 ++++++++++++++++++++++++++++++++++++++++++++ csaf/doc.go | 5 +- 5 files changed, 533 insertions(+), 1032 deletions(-) delete mode 100644 csaf/cvss30enums.go delete mode 100644 csaf/cvss31enums.go create mode 100644 csaf/cvss3enums.go diff --git a/csaf/advisory.go b/csaf/advisory.go index 16d3503..1fb8a5d 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -644,36 +644,36 @@ type CVSS2 struct { // CVSS3 holding a CVSS v3.x value type CVSS3 struct { - Version CVSSVersion3 `json:"version"` // required - VectorString CVSS3VectorString `json:"vectorString"` // required - AttackVector *CVSS30AttackVector `json:"attackVector,omitempty"` - AttackComplexity *CVSS30AttackComplexity `json:"attackComplexity,omitempty"` - PrivilegesRequired *CVSS30PrivilegesRequired `json:"privilegesRequired,omitempty"` - UserInteraction *CVSS30UserInteraction `json:"userInteraction,omitempty"` - Scope *CVSS30Scope `json:"scope,omitempty"` - ConfidentialityImpact *CVSS30Cia `json:"confidentialityImpact,omitempty"` - IntegrityImpact CVSS30Cia `json:"integrityImpact,omitempty"` - AvailabilityImpact *CVSS30Cia `json:"availabilityImpact,omitempty"` - BaseScore float64 `json:"baseScore"` // required - BaseSeverity CVSS30Severity `json:"baseSeverity"` // required - ExploitCodeMaturity *CVSS30ExploitCodeMaturity `json:"exploitCodeMaturity,omitempty"` - RemediationLevel *CVSS30RemediationLevel `json:"remediationLevel,omitempty"` - ReportConfidence *CVSS30Confidence `json:"reportConfidence,omitempty"` - TemporalScore *float64 `json:"temporalScore,omitempty"` - TemporalSeverity *CVSS30Severity `json:"temporalSeverity,omitempty"` - ConfidentialityRequirement *CVSS30CiaRequirement `json:"confidentialityRequirement,omitempty"` - IntegrityRequirement *CVSS30CiaRequirement `json:"integrityRequirement,omitempty"` - AvailabilityRequirement *CVSS30CiaRequirement `json:"availabilityRequirement,omitempty"` - ModifiedAttackVector *CVSS30ModifiedAttackVector 
`json:"modifiedAttackVector,omitempty"` - ModifiedAttackComplexity *CVSS30ModifiedAttackComplexity `json:"modifiedAttackComplexity,omitempty"` - ModifiedPrivilegesRequired *CVSS30ModifiedPrivilegesRequired `json:"modifiedPrivilegesRequired,omitempty"` - ModifiedUserInteraction *CVSS30ModifiedUserInteraction `json:"modifiedUserInteraction,omitempty"` - ModifiedScope *CVSS30ModifiedScope `json:"modifiedScope,omitempty"` - ModifiedConfidentialityImpact *CVSS30ModifiedCia `json:"modifiedConfidentialityImpact,omitempty"` - ModifiedIntegrityImpact *CVSS30ModifiedCia `json:"modifiedIntegrityImpact,omitempty"` - ModifiedAvailabilityImpact *CVSS30ModifiedCia `json:"modifiedAvailabilityImpact,omitempty"` - EenvironmentalScore *float64 `json:"environmentalScore,omitempty"` - EnvironmentalSeverity *CVSS30Severity `json:"environmentalSeverity,omitempty"` + Version CVSSVersion3 `json:"version"` // required + VectorString CVSS3VectorString `json:"vectorString"` // required + AttackVector *CVSS3AttackVector `json:"attackVector,omitempty"` + AttackComplexity *CVSS3AttackComplexity `json:"attackComplexity,omitempty"` + PrivilegesRequired *CVSS3PrivilegesRequired `json:"privilegesRequired,omitempty"` + UserInteraction *CVSS3UserInteraction `json:"userInteraction,omitempty"` + Scope *CVSS3Scope `json:"scope,omitempty"` + ConfidentialityImpact *CVSS3Cia `json:"confidentialityImpact,omitempty"` + IntegrityImpact CVSS3Cia `json:"integrityImpact,omitempty"` + AvailabilityImpact *CVSS3Cia `json:"availabilityImpact,omitempty"` + BaseScore float64 `json:"baseScore"` // required + BaseSeverity CVSS3Severity `json:"baseSeverity"` // required + ExploitCodeMaturity *CVSS3ExploitCodeMaturity `json:"exploitCodeMaturity,omitempty"` + RemediationLevel *CVSS3RemediationLevel `json:"remediationLevel,omitempty"` + ReportConfidence *CVSS3Confidence `json:"reportConfidence,omitempty"` + TemporalScore *float64 `json:"temporalScore,omitempty"` + TemporalSeverity *CVSS3Severity `json:"temporalSeverity,omitempty"` + ConfidentialityRequirement *CVSS3CiaRequirement `json:"confidentialityRequirement,omitempty"` + IntegrityRequirement *CVSS3CiaRequirement `json:"integrityRequirement,omitempty"` + AvailabilityRequirement *CVSS3CiaRequirement `json:"availabilityRequirement,omitempty"` + ModifiedAttackVector *CVSS3ModifiedAttackVector `json:"modifiedAttackVector,omitempty"` + ModifiedAttackComplexity *CVSS3ModifiedAttackComplexity `json:"modifiedAttackComplexity,omitempty"` + ModifiedPrivilegesRequired *CVSS3ModifiedPrivilegesRequired `json:"modifiedPrivilegesRequired,omitempty"` + ModifiedUserInteraction *CVSS3ModifiedUserInteraction `json:"modifiedUserInteraction,omitempty"` + ModifiedScope *CVSS3ModifiedScope `json:"modifiedScope,omitempty"` + ModifiedConfidentialityImpact *CVSS3ModifiedCia `json:"modifiedConfidentialityImpact,omitempty"` + ModifiedIntegrityImpact *CVSS3ModifiedCia `json:"modifiedIntegrityImpact,omitempty"` + ModifiedAvailabilityImpact *CVSS3ModifiedCia `json:"modifiedAvailabilityImpact,omitempty"` + EenvironmentalScore *float64 `json:"environmentalScore,omitempty"` + EnvironmentalSeverity *CVSS3Severity `json:"environmentalSeverity,omitempty"` } // Score specifies information about (at least one) score of the vulnerability and for which diff --git a/csaf/cvss30enums.go b/csaf/cvss30enums.go deleted file mode 100644 index 7524174..0000000 --- a/csaf/cvss30enums.go +++ /dev/null @@ -1,500 +0,0 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. 
-// -// SPDX-License-Identifier: MIT -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH -// -// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! - -package csaf - -// CVSS30AttackComplexity represents the attackComplexityType in CVSS30. -type CVSS30AttackComplexity string - -const ( - // CVSS30AttackComplexityHigh is a constant for "HIGH". - CVSS30AttackComplexityHigh CVSS30AttackComplexity = "HIGH" - // CVSS30AttackComplexityLow is a constant for "LOW". - CVSS30AttackComplexityLow CVSS30AttackComplexity = "LOW" -) - -var cvss30AttackComplexityPattern = alternativesUnmarshal( - string(CVSS30AttackComplexityHigh), - string(CVSS30AttackComplexityLow), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30AttackComplexity) UnmarshalText(data []byte) error { - s, err := cvss30AttackComplexityPattern(data) - if err == nil { - *e = CVSS30AttackComplexity(s) - } - return err -} - -// CVSS30AttackVector represents the attackVectorType in CVSS30. -type CVSS30AttackVector string - -const ( - // CVSS30AttackVectorNetwork is a constant for "NETWORK". - CVSS30AttackVectorNetwork CVSS30AttackVector = "NETWORK" - // CVSS30AttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". - CVSS30AttackVectorAdjacentNetwork CVSS30AttackVector = "ADJACENT_NETWORK" - // CVSS30AttackVectorLocal is a constant for "LOCAL". - CVSS30AttackVectorLocal CVSS30AttackVector = "LOCAL" - // CVSS30AttackVectorPhysical is a constant for "PHYSICAL". - CVSS30AttackVectorPhysical CVSS30AttackVector = "PHYSICAL" -) - -var cvss30AttackVectorPattern = alternativesUnmarshal( - string(CVSS30AttackVectorNetwork), - string(CVSS30AttackVectorAdjacentNetwork), - string(CVSS30AttackVectorLocal), - string(CVSS30AttackVectorPhysical), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30AttackVector) UnmarshalText(data []byte) error { - s, err := cvss30AttackVectorPattern(data) - if err == nil { - *e = CVSS30AttackVector(s) - } - return err -} - -// CVSS30CiaRequirement represents the ciaRequirementType in CVSS30. -type CVSS30CiaRequirement string - -const ( - // CVSS30CiaRequirementLow is a constant for "LOW". - CVSS30CiaRequirementLow CVSS30CiaRequirement = "LOW" - // CVSS30CiaRequirementMedium is a constant for "MEDIUM". - CVSS30CiaRequirementMedium CVSS30CiaRequirement = "MEDIUM" - // CVSS30CiaRequirementHigh is a constant for "HIGH". - CVSS30CiaRequirementHigh CVSS30CiaRequirement = "HIGH" - // CVSS30CiaRequirementNotDefined is a constant for "NOT_DEFINED". - CVSS30CiaRequirementNotDefined CVSS30CiaRequirement = "NOT_DEFINED" -) - -var cvss30CiaRequirementPattern = alternativesUnmarshal( - string(CVSS30CiaRequirementLow), - string(CVSS30CiaRequirementMedium), - string(CVSS30CiaRequirementHigh), - string(CVSS30CiaRequirementNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30CiaRequirement) UnmarshalText(data []byte) error { - s, err := cvss30CiaRequirementPattern(data) - if err == nil { - *e = CVSS30CiaRequirement(s) - } - return err -} - -// CVSS30Cia represents the ciaType in CVSS30. -type CVSS30Cia string - -const ( - // CVSS30CiaNone is a constant for "NONE". - CVSS30CiaNone CVSS30Cia = "NONE" - // CVSS30CiaLow is a constant for "LOW". - CVSS30CiaLow CVSS30Cia = "LOW" - // CVSS30CiaHigh is a constant for "HIGH". 
- CVSS30CiaHigh CVSS30Cia = "HIGH" -) - -var cvss30CiaPattern = alternativesUnmarshal( - string(CVSS30CiaNone), - string(CVSS30CiaLow), - string(CVSS30CiaHigh), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30Cia) UnmarshalText(data []byte) error { - s, err := cvss30CiaPattern(data) - if err == nil { - *e = CVSS30Cia(s) - } - return err -} - -// CVSS30Confidence represents the confidenceType in CVSS30. -type CVSS30Confidence string - -const ( - // CVSS30ConfidenceUnknown is a constant for "UNKNOWN". - CVSS30ConfidenceUnknown CVSS30Confidence = "UNKNOWN" - // CVSS30ConfidenceReasonable is a constant for "REASONABLE". - CVSS30ConfidenceReasonable CVSS30Confidence = "REASONABLE" - // CVSS30ConfidenceConfirmed is a constant for "CONFIRMED". - CVSS30ConfidenceConfirmed CVSS30Confidence = "CONFIRMED" - // CVSS30ConfidenceNotDefined is a constant for "NOT_DEFINED". - CVSS30ConfidenceNotDefined CVSS30Confidence = "NOT_DEFINED" -) - -var cvss30ConfidencePattern = alternativesUnmarshal( - string(CVSS30ConfidenceUnknown), - string(CVSS30ConfidenceReasonable), - string(CVSS30ConfidenceConfirmed), - string(CVSS30ConfidenceNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30Confidence) UnmarshalText(data []byte) error { - s, err := cvss30ConfidencePattern(data) - if err == nil { - *e = CVSS30Confidence(s) - } - return err -} - -// CVSS30ExploitCodeMaturity represents the exploitCodeMaturityType in CVSS30. -type CVSS30ExploitCodeMaturity string - -const ( - // CVSS30ExploitCodeMaturityUnproven is a constant for "UNPROVEN". - CVSS30ExploitCodeMaturityUnproven CVSS30ExploitCodeMaturity = "UNPROVEN" - // CVSS30ExploitCodeMaturityProofOfConcept is a constant for "PROOF_OF_CONCEPT". - CVSS30ExploitCodeMaturityProofOfConcept CVSS30ExploitCodeMaturity = "PROOF_OF_CONCEPT" - // CVSS30ExploitCodeMaturityFunctional is a constant for "FUNCTIONAL". - CVSS30ExploitCodeMaturityFunctional CVSS30ExploitCodeMaturity = "FUNCTIONAL" - // CVSS30ExploitCodeMaturityHigh is a constant for "HIGH". - CVSS30ExploitCodeMaturityHigh CVSS30ExploitCodeMaturity = "HIGH" - // CVSS30ExploitCodeMaturityNotDefined is a constant for "NOT_DEFINED". - CVSS30ExploitCodeMaturityNotDefined CVSS30ExploitCodeMaturity = "NOT_DEFINED" -) - -var cvss30ExploitCodeMaturityPattern = alternativesUnmarshal( - string(CVSS30ExploitCodeMaturityUnproven), - string(CVSS30ExploitCodeMaturityProofOfConcept), - string(CVSS30ExploitCodeMaturityFunctional), - string(CVSS30ExploitCodeMaturityHigh), - string(CVSS30ExploitCodeMaturityNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ExploitCodeMaturity) UnmarshalText(data []byte) error { - s, err := cvss30ExploitCodeMaturityPattern(data) - if err == nil { - *e = CVSS30ExploitCodeMaturity(s) - } - return err -} - -// CVSS30ModifiedAttackComplexity represents the modifiedAttackComplexityType in CVSS30. -type CVSS30ModifiedAttackComplexity string - -const ( - // CVSS30ModifiedAttackComplexityHigh is a constant for "HIGH". - CVSS30ModifiedAttackComplexityHigh CVSS30ModifiedAttackComplexity = "HIGH" - // CVSS30ModifiedAttackComplexityLow is a constant for "LOW". - CVSS30ModifiedAttackComplexityLow CVSS30ModifiedAttackComplexity = "LOW" - // CVSS30ModifiedAttackComplexityNotDefined is a constant for "NOT_DEFINED". 
- CVSS30ModifiedAttackComplexityNotDefined CVSS30ModifiedAttackComplexity = "NOT_DEFINED" -) - -var cvss30ModifiedAttackComplexityPattern = alternativesUnmarshal( - string(CVSS30ModifiedAttackComplexityHigh), - string(CVSS30ModifiedAttackComplexityLow), - string(CVSS30ModifiedAttackComplexityNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ModifiedAttackComplexity) UnmarshalText(data []byte) error { - s, err := cvss30ModifiedAttackComplexityPattern(data) - if err == nil { - *e = CVSS30ModifiedAttackComplexity(s) - } - return err -} - -// CVSS30ModifiedAttackVector represents the modifiedAttackVectorType in CVSS30. -type CVSS30ModifiedAttackVector string - -const ( - // CVSS30ModifiedAttackVectorNetwork is a constant for "NETWORK". - CVSS30ModifiedAttackVectorNetwork CVSS30ModifiedAttackVector = "NETWORK" - // CVSS30ModifiedAttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". - CVSS30ModifiedAttackVectorAdjacentNetwork CVSS30ModifiedAttackVector = "ADJACENT_NETWORK" - // CVSS30ModifiedAttackVectorLocal is a constant for "LOCAL". - CVSS30ModifiedAttackVectorLocal CVSS30ModifiedAttackVector = "LOCAL" - // CVSS30ModifiedAttackVectorPhysical is a constant for "PHYSICAL". - CVSS30ModifiedAttackVectorPhysical CVSS30ModifiedAttackVector = "PHYSICAL" - // CVSS30ModifiedAttackVectorNotDefined is a constant for "NOT_DEFINED". - CVSS30ModifiedAttackVectorNotDefined CVSS30ModifiedAttackVector = "NOT_DEFINED" -) - -var cvss30ModifiedAttackVectorPattern = alternativesUnmarshal( - string(CVSS30ModifiedAttackVectorNetwork), - string(CVSS30ModifiedAttackVectorAdjacentNetwork), - string(CVSS30ModifiedAttackVectorLocal), - string(CVSS30ModifiedAttackVectorPhysical), - string(CVSS30ModifiedAttackVectorNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ModifiedAttackVector) UnmarshalText(data []byte) error { - s, err := cvss30ModifiedAttackVectorPattern(data) - if err == nil { - *e = CVSS30ModifiedAttackVector(s) - } - return err -} - -// CVSS30ModifiedCia represents the modifiedCiaType in CVSS30. -type CVSS30ModifiedCia string - -const ( - // CVSS30ModifiedCiaNone is a constant for "NONE". - CVSS30ModifiedCiaNone CVSS30ModifiedCia = "NONE" - // CVSS30ModifiedCiaLow is a constant for "LOW". - CVSS30ModifiedCiaLow CVSS30ModifiedCia = "LOW" - // CVSS30ModifiedCiaHigh is a constant for "HIGH". - CVSS30ModifiedCiaHigh CVSS30ModifiedCia = "HIGH" - // CVSS30ModifiedCiaNotDefined is a constant for "NOT_DEFINED". - CVSS30ModifiedCiaNotDefined CVSS30ModifiedCia = "NOT_DEFINED" -) - -var cvss30ModifiedCiaPattern = alternativesUnmarshal( - string(CVSS30ModifiedCiaNone), - string(CVSS30ModifiedCiaLow), - string(CVSS30ModifiedCiaHigh), - string(CVSS30ModifiedCiaNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ModifiedCia) UnmarshalText(data []byte) error { - s, err := cvss30ModifiedCiaPattern(data) - if err == nil { - *e = CVSS30ModifiedCia(s) - } - return err -} - -// CVSS30ModifiedPrivilegesRequired represents the modifiedPrivilegesRequiredType in CVSS30. -type CVSS30ModifiedPrivilegesRequired string - -const ( - // CVSS30ModifiedPrivilegesRequiredHigh is a constant for "HIGH". - CVSS30ModifiedPrivilegesRequiredHigh CVSS30ModifiedPrivilegesRequired = "HIGH" - // CVSS30ModifiedPrivilegesRequiredLow is a constant for "LOW". 
- CVSS30ModifiedPrivilegesRequiredLow CVSS30ModifiedPrivilegesRequired = "LOW" - // CVSS30ModifiedPrivilegesRequiredNone is a constant for "NONE". - CVSS30ModifiedPrivilegesRequiredNone CVSS30ModifiedPrivilegesRequired = "NONE" - // CVSS30ModifiedPrivilegesRequiredNotDefined is a constant for "NOT_DEFINED". - CVSS30ModifiedPrivilegesRequiredNotDefined CVSS30ModifiedPrivilegesRequired = "NOT_DEFINED" -) - -var cvss30ModifiedPrivilegesRequiredPattern = alternativesUnmarshal( - string(CVSS30ModifiedPrivilegesRequiredHigh), - string(CVSS30ModifiedPrivilegesRequiredLow), - string(CVSS30ModifiedPrivilegesRequiredNone), - string(CVSS30ModifiedPrivilegesRequiredNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ModifiedPrivilegesRequired) UnmarshalText(data []byte) error { - s, err := cvss30ModifiedPrivilegesRequiredPattern(data) - if err == nil { - *e = CVSS30ModifiedPrivilegesRequired(s) - } - return err -} - -// CVSS30ModifiedScope represents the modifiedScopeType in CVSS30. -type CVSS30ModifiedScope string - -const ( - // CVSS30ModifiedScopeUnchanged is a constant for "UNCHANGED". - CVSS30ModifiedScopeUnchanged CVSS30ModifiedScope = "UNCHANGED" - // CVSS30ModifiedScopeChanged is a constant for "CHANGED". - CVSS30ModifiedScopeChanged CVSS30ModifiedScope = "CHANGED" - // CVSS30ModifiedScopeNotDefined is a constant for "NOT_DEFINED". - CVSS30ModifiedScopeNotDefined CVSS30ModifiedScope = "NOT_DEFINED" -) - -var cvss30ModifiedScopePattern = alternativesUnmarshal( - string(CVSS30ModifiedScopeUnchanged), - string(CVSS30ModifiedScopeChanged), - string(CVSS30ModifiedScopeNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ModifiedScope) UnmarshalText(data []byte) error { - s, err := cvss30ModifiedScopePattern(data) - if err == nil { - *e = CVSS30ModifiedScope(s) - } - return err -} - -// CVSS30ModifiedUserInteraction represents the modifiedUserInteractionType in CVSS30. -type CVSS30ModifiedUserInteraction string - -const ( - // CVSS30ModifiedUserInteractionNone is a constant for "NONE". - CVSS30ModifiedUserInteractionNone CVSS30ModifiedUserInteraction = "NONE" - // CVSS30ModifiedUserInteractionRequired is a constant for "REQUIRED". - CVSS30ModifiedUserInteractionRequired CVSS30ModifiedUserInteraction = "REQUIRED" - // CVSS30ModifiedUserInteractionNotDefined is a constant for "NOT_DEFINED". - CVSS30ModifiedUserInteractionNotDefined CVSS30ModifiedUserInteraction = "NOT_DEFINED" -) - -var cvss30ModifiedUserInteractionPattern = alternativesUnmarshal( - string(CVSS30ModifiedUserInteractionNone), - string(CVSS30ModifiedUserInteractionRequired), - string(CVSS30ModifiedUserInteractionNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30ModifiedUserInteraction) UnmarshalText(data []byte) error { - s, err := cvss30ModifiedUserInteractionPattern(data) - if err == nil { - *e = CVSS30ModifiedUserInteraction(s) - } - return err -} - -// CVSS30PrivilegesRequired represents the privilegesRequiredType in CVSS30. -type CVSS30PrivilegesRequired string - -const ( - // CVSS30PrivilegesRequiredHigh is a constant for "HIGH". - CVSS30PrivilegesRequiredHigh CVSS30PrivilegesRequired = "HIGH" - // CVSS30PrivilegesRequiredLow is a constant for "LOW". - CVSS30PrivilegesRequiredLow CVSS30PrivilegesRequired = "LOW" - // CVSS30PrivilegesRequiredNone is a constant for "NONE". 
- CVSS30PrivilegesRequiredNone CVSS30PrivilegesRequired = "NONE" -) - -var cvss30PrivilegesRequiredPattern = alternativesUnmarshal( - string(CVSS30PrivilegesRequiredHigh), - string(CVSS30PrivilegesRequiredLow), - string(CVSS30PrivilegesRequiredNone), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30PrivilegesRequired) UnmarshalText(data []byte) error { - s, err := cvss30PrivilegesRequiredPattern(data) - if err == nil { - *e = CVSS30PrivilegesRequired(s) - } - return err -} - -// CVSS30RemediationLevel represents the remediationLevelType in CVSS30. -type CVSS30RemediationLevel string - -const ( - // CVSS30RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX". - CVSS30RemediationLevelOfficialFix CVSS30RemediationLevel = "OFFICIAL_FIX" - // CVSS30RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX". - CVSS30RemediationLevelTemporaryFix CVSS30RemediationLevel = "TEMPORARY_FIX" - // CVSS30RemediationLevelWorkaround is a constant for "WORKAROUND". - CVSS30RemediationLevelWorkaround CVSS30RemediationLevel = "WORKAROUND" - // CVSS30RemediationLevelUnavailable is a constant for "UNAVAILABLE". - CVSS30RemediationLevelUnavailable CVSS30RemediationLevel = "UNAVAILABLE" - // CVSS30RemediationLevelNotDefined is a constant for "NOT_DEFINED". - CVSS30RemediationLevelNotDefined CVSS30RemediationLevel = "NOT_DEFINED" -) - -var cvss30RemediationLevelPattern = alternativesUnmarshal( - string(CVSS30RemediationLevelOfficialFix), - string(CVSS30RemediationLevelTemporaryFix), - string(CVSS30RemediationLevelWorkaround), - string(CVSS30RemediationLevelUnavailable), - string(CVSS30RemediationLevelNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30RemediationLevel) UnmarshalText(data []byte) error { - s, err := cvss30RemediationLevelPattern(data) - if err == nil { - *e = CVSS30RemediationLevel(s) - } - return err -} - -// CVSS30Scope represents the scopeType in CVSS30. -type CVSS30Scope string - -const ( - // CVSS30ScopeUnchanged is a constant for "UNCHANGED". - CVSS30ScopeUnchanged CVSS30Scope = "UNCHANGED" - // CVSS30ScopeChanged is a constant for "CHANGED". - CVSS30ScopeChanged CVSS30Scope = "CHANGED" -) - -var cvss30ScopePattern = alternativesUnmarshal( - string(CVSS30ScopeUnchanged), - string(CVSS30ScopeChanged), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30Scope) UnmarshalText(data []byte) error { - s, err := cvss30ScopePattern(data) - if err == nil { - *e = CVSS30Scope(s) - } - return err -} - -// CVSS30Severity represents the severityType in CVSS30. -type CVSS30Severity string - -const ( - // CVSS30SeverityNone is a constant for "NONE". - CVSS30SeverityNone CVSS30Severity = "NONE" - // CVSS30SeverityLow is a constant for "LOW". - CVSS30SeverityLow CVSS30Severity = "LOW" - // CVSS30SeverityMedium is a constant for "MEDIUM". - CVSS30SeverityMedium CVSS30Severity = "MEDIUM" - // CVSS30SeverityHigh is a constant for "HIGH". - CVSS30SeverityHigh CVSS30Severity = "HIGH" - // CVSS30SeverityCritical is a constant for "CRITICAL". - CVSS30SeverityCritical CVSS30Severity = "CRITICAL" -) - -var cvss30SeverityPattern = alternativesUnmarshal( - string(CVSS30SeverityNone), - string(CVSS30SeverityLow), - string(CVSS30SeverityMedium), - string(CVSS30SeverityHigh), - string(CVSS30SeverityCritical), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
-func (e *CVSS30Severity) UnmarshalText(data []byte) error { - s, err := cvss30SeverityPattern(data) - if err == nil { - *e = CVSS30Severity(s) - } - return err -} - -// CVSS30UserInteraction represents the userInteractionType in CVSS30. -type CVSS30UserInteraction string - -const ( - // CVSS30UserInteractionNone is a constant for "NONE". - CVSS30UserInteractionNone CVSS30UserInteraction = "NONE" - // CVSS30UserInteractionRequired is a constant for "REQUIRED". - CVSS30UserInteractionRequired CVSS30UserInteraction = "REQUIRED" -) - -var cvss30UserInteractionPattern = alternativesUnmarshal( - string(CVSS30UserInteractionNone), - string(CVSS30UserInteractionRequired), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS30UserInteraction) UnmarshalText(data []byte) error { - s, err := cvss30UserInteractionPattern(data) - if err == nil { - *e = CVSS30UserInteraction(s) - } - return err -} diff --git a/csaf/cvss31enums.go b/csaf/cvss31enums.go deleted file mode 100644 index 0de4946..0000000 --- a/csaf/cvss31enums.go +++ /dev/null @@ -1,500 +0,0 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. -// -// SPDX-License-Identifier: MIT -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH -// -// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! - -package csaf - -// CVSS31AttackComplexity represents the attackComplexityType in CVSS31. -type CVSS31AttackComplexity string - -const ( - // CVSS31AttackComplexityHigh is a constant for "HIGH". - CVSS31AttackComplexityHigh CVSS31AttackComplexity = "HIGH" - // CVSS31AttackComplexityLow is a constant for "LOW". - CVSS31AttackComplexityLow CVSS31AttackComplexity = "LOW" -) - -var cvss31AttackComplexityPattern = alternativesUnmarshal( - string(CVSS31AttackComplexityHigh), - string(CVSS31AttackComplexityLow), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31AttackComplexity) UnmarshalText(data []byte) error { - s, err := cvss31AttackComplexityPattern(data) - if err == nil { - *e = CVSS31AttackComplexity(s) - } - return err -} - -// CVSS31AttackVector represents the attackVectorType in CVSS31. -type CVSS31AttackVector string - -const ( - // CVSS31AttackVectorNetwork is a constant for "NETWORK". - CVSS31AttackVectorNetwork CVSS31AttackVector = "NETWORK" - // CVSS31AttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". - CVSS31AttackVectorAdjacentNetwork CVSS31AttackVector = "ADJACENT_NETWORK" - // CVSS31AttackVectorLocal is a constant for "LOCAL". - CVSS31AttackVectorLocal CVSS31AttackVector = "LOCAL" - // CVSS31AttackVectorPhysical is a constant for "PHYSICAL". - CVSS31AttackVectorPhysical CVSS31AttackVector = "PHYSICAL" -) - -var cvss31AttackVectorPattern = alternativesUnmarshal( - string(CVSS31AttackVectorNetwork), - string(CVSS31AttackVectorAdjacentNetwork), - string(CVSS31AttackVectorLocal), - string(CVSS31AttackVectorPhysical), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31AttackVector) UnmarshalText(data []byte) error { - s, err := cvss31AttackVectorPattern(data) - if err == nil { - *e = CVSS31AttackVector(s) - } - return err -} - -// CVSS31CiaRequirement represents the ciaRequirementType in CVSS31. -type CVSS31CiaRequirement string - -const ( - // CVSS31CiaRequirementLow is a constant for "LOW". 
- CVSS31CiaRequirementLow CVSS31CiaRequirement = "LOW" - // CVSS31CiaRequirementMedium is a constant for "MEDIUM". - CVSS31CiaRequirementMedium CVSS31CiaRequirement = "MEDIUM" - // CVSS31CiaRequirementHigh is a constant for "HIGH". - CVSS31CiaRequirementHigh CVSS31CiaRequirement = "HIGH" - // CVSS31CiaRequirementNotDefined is a constant for "NOT_DEFINED". - CVSS31CiaRequirementNotDefined CVSS31CiaRequirement = "NOT_DEFINED" -) - -var cvss31CiaRequirementPattern = alternativesUnmarshal( - string(CVSS31CiaRequirementLow), - string(CVSS31CiaRequirementMedium), - string(CVSS31CiaRequirementHigh), - string(CVSS31CiaRequirementNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31CiaRequirement) UnmarshalText(data []byte) error { - s, err := cvss31CiaRequirementPattern(data) - if err == nil { - *e = CVSS31CiaRequirement(s) - } - return err -} - -// CVSS31Cia represents the ciaType in CVSS31. -type CVSS31Cia string - -const ( - // CVSS31CiaNone is a constant for "NONE". - CVSS31CiaNone CVSS31Cia = "NONE" - // CVSS31CiaLow is a constant for "LOW". - CVSS31CiaLow CVSS31Cia = "LOW" - // CVSS31CiaHigh is a constant for "HIGH". - CVSS31CiaHigh CVSS31Cia = "HIGH" -) - -var cvss31CiaPattern = alternativesUnmarshal( - string(CVSS31CiaNone), - string(CVSS31CiaLow), - string(CVSS31CiaHigh), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31Cia) UnmarshalText(data []byte) error { - s, err := cvss31CiaPattern(data) - if err == nil { - *e = CVSS31Cia(s) - } - return err -} - -// CVSS31Confidence represents the confidenceType in CVSS31. -type CVSS31Confidence string - -const ( - // CVSS31ConfidenceUnknown is a constant for "UNKNOWN". - CVSS31ConfidenceUnknown CVSS31Confidence = "UNKNOWN" - // CVSS31ConfidenceReasonable is a constant for "REASONABLE". - CVSS31ConfidenceReasonable CVSS31Confidence = "REASONABLE" - // CVSS31ConfidenceConfirmed is a constant for "CONFIRMED". - CVSS31ConfidenceConfirmed CVSS31Confidence = "CONFIRMED" - // CVSS31ConfidenceNotDefined is a constant for "NOT_DEFINED". - CVSS31ConfidenceNotDefined CVSS31Confidence = "NOT_DEFINED" -) - -var cvss31ConfidencePattern = alternativesUnmarshal( - string(CVSS31ConfidenceUnknown), - string(CVSS31ConfidenceReasonable), - string(CVSS31ConfidenceConfirmed), - string(CVSS31ConfidenceNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31Confidence) UnmarshalText(data []byte) error { - s, err := cvss31ConfidencePattern(data) - if err == nil { - *e = CVSS31Confidence(s) - } - return err -} - -// CVSS31ExploitCodeMaturity represents the exploitCodeMaturityType in CVSS31. -type CVSS31ExploitCodeMaturity string - -const ( - // CVSS31ExploitCodeMaturityUnproven is a constant for "UNPROVEN". - CVSS31ExploitCodeMaturityUnproven CVSS31ExploitCodeMaturity = "UNPROVEN" - // CVSS31ExploitCodeMaturityProofOfConcept is a constant for "PROOF_OF_CONCEPT". - CVSS31ExploitCodeMaturityProofOfConcept CVSS31ExploitCodeMaturity = "PROOF_OF_CONCEPT" - // CVSS31ExploitCodeMaturityFunctional is a constant for "FUNCTIONAL". - CVSS31ExploitCodeMaturityFunctional CVSS31ExploitCodeMaturity = "FUNCTIONAL" - // CVSS31ExploitCodeMaturityHigh is a constant for "HIGH". - CVSS31ExploitCodeMaturityHigh CVSS31ExploitCodeMaturity = "HIGH" - // CVSS31ExploitCodeMaturityNotDefined is a constant for "NOT_DEFINED". 
- CVSS31ExploitCodeMaturityNotDefined CVSS31ExploitCodeMaturity = "NOT_DEFINED" -) - -var cvss31ExploitCodeMaturityPattern = alternativesUnmarshal( - string(CVSS31ExploitCodeMaturityUnproven), - string(CVSS31ExploitCodeMaturityProofOfConcept), - string(CVSS31ExploitCodeMaturityFunctional), - string(CVSS31ExploitCodeMaturityHigh), - string(CVSS31ExploitCodeMaturityNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ExploitCodeMaturity) UnmarshalText(data []byte) error { - s, err := cvss31ExploitCodeMaturityPattern(data) - if err == nil { - *e = CVSS31ExploitCodeMaturity(s) - } - return err -} - -// CVSS31ModifiedAttackComplexity represents the modifiedAttackComplexityType in CVSS31. -type CVSS31ModifiedAttackComplexity string - -const ( - // CVSS31ModifiedAttackComplexityHigh is a constant for "HIGH". - CVSS31ModifiedAttackComplexityHigh CVSS31ModifiedAttackComplexity = "HIGH" - // CVSS31ModifiedAttackComplexityLow is a constant for "LOW". - CVSS31ModifiedAttackComplexityLow CVSS31ModifiedAttackComplexity = "LOW" - // CVSS31ModifiedAttackComplexityNotDefined is a constant for "NOT_DEFINED". - CVSS31ModifiedAttackComplexityNotDefined CVSS31ModifiedAttackComplexity = "NOT_DEFINED" -) - -var cvss31ModifiedAttackComplexityPattern = alternativesUnmarshal( - string(CVSS31ModifiedAttackComplexityHigh), - string(CVSS31ModifiedAttackComplexityLow), - string(CVSS31ModifiedAttackComplexityNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ModifiedAttackComplexity) UnmarshalText(data []byte) error { - s, err := cvss31ModifiedAttackComplexityPattern(data) - if err == nil { - *e = CVSS31ModifiedAttackComplexity(s) - } - return err -} - -// CVSS31ModifiedAttackVector represents the modifiedAttackVectorType in CVSS31. -type CVSS31ModifiedAttackVector string - -const ( - // CVSS31ModifiedAttackVectorNetwork is a constant for "NETWORK". - CVSS31ModifiedAttackVectorNetwork CVSS31ModifiedAttackVector = "NETWORK" - // CVSS31ModifiedAttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". - CVSS31ModifiedAttackVectorAdjacentNetwork CVSS31ModifiedAttackVector = "ADJACENT_NETWORK" - // CVSS31ModifiedAttackVectorLocal is a constant for "LOCAL". - CVSS31ModifiedAttackVectorLocal CVSS31ModifiedAttackVector = "LOCAL" - // CVSS31ModifiedAttackVectorPhysical is a constant for "PHYSICAL". - CVSS31ModifiedAttackVectorPhysical CVSS31ModifiedAttackVector = "PHYSICAL" - // CVSS31ModifiedAttackVectorNotDefined is a constant for "NOT_DEFINED". - CVSS31ModifiedAttackVectorNotDefined CVSS31ModifiedAttackVector = "NOT_DEFINED" -) - -var cvss31ModifiedAttackVectorPattern = alternativesUnmarshal( - string(CVSS31ModifiedAttackVectorNetwork), - string(CVSS31ModifiedAttackVectorAdjacentNetwork), - string(CVSS31ModifiedAttackVectorLocal), - string(CVSS31ModifiedAttackVectorPhysical), - string(CVSS31ModifiedAttackVectorNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ModifiedAttackVector) UnmarshalText(data []byte) error { - s, err := cvss31ModifiedAttackVectorPattern(data) - if err == nil { - *e = CVSS31ModifiedAttackVector(s) - } - return err -} - -// CVSS31ModifiedCia represents the modifiedCiaType in CVSS31. -type CVSS31ModifiedCia string - -const ( - // CVSS31ModifiedCiaNone is a constant for "NONE". - CVSS31ModifiedCiaNone CVSS31ModifiedCia = "NONE" - // CVSS31ModifiedCiaLow is a constant for "LOW". 
- CVSS31ModifiedCiaLow CVSS31ModifiedCia = "LOW" - // CVSS31ModifiedCiaHigh is a constant for "HIGH". - CVSS31ModifiedCiaHigh CVSS31ModifiedCia = "HIGH" - // CVSS31ModifiedCiaNotDefined is a constant for "NOT_DEFINED". - CVSS31ModifiedCiaNotDefined CVSS31ModifiedCia = "NOT_DEFINED" -) - -var cvss31ModifiedCiaPattern = alternativesUnmarshal( - string(CVSS31ModifiedCiaNone), - string(CVSS31ModifiedCiaLow), - string(CVSS31ModifiedCiaHigh), - string(CVSS31ModifiedCiaNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ModifiedCia) UnmarshalText(data []byte) error { - s, err := cvss31ModifiedCiaPattern(data) - if err == nil { - *e = CVSS31ModifiedCia(s) - } - return err -} - -// CVSS31ModifiedPrivilegesRequired represents the modifiedPrivilegesRequiredType in CVSS31. -type CVSS31ModifiedPrivilegesRequired string - -const ( - // CVSS31ModifiedPrivilegesRequiredHigh is a constant for "HIGH". - CVSS31ModifiedPrivilegesRequiredHigh CVSS31ModifiedPrivilegesRequired = "HIGH" - // CVSS31ModifiedPrivilegesRequiredLow is a constant for "LOW". - CVSS31ModifiedPrivilegesRequiredLow CVSS31ModifiedPrivilegesRequired = "LOW" - // CVSS31ModifiedPrivilegesRequiredNone is a constant for "NONE". - CVSS31ModifiedPrivilegesRequiredNone CVSS31ModifiedPrivilegesRequired = "NONE" - // CVSS31ModifiedPrivilegesRequiredNotDefined is a constant for "NOT_DEFINED". - CVSS31ModifiedPrivilegesRequiredNotDefined CVSS31ModifiedPrivilegesRequired = "NOT_DEFINED" -) - -var cvss31ModifiedPrivilegesRequiredPattern = alternativesUnmarshal( - string(CVSS31ModifiedPrivilegesRequiredHigh), - string(CVSS31ModifiedPrivilegesRequiredLow), - string(CVSS31ModifiedPrivilegesRequiredNone), - string(CVSS31ModifiedPrivilegesRequiredNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ModifiedPrivilegesRequired) UnmarshalText(data []byte) error { - s, err := cvss31ModifiedPrivilegesRequiredPattern(data) - if err == nil { - *e = CVSS31ModifiedPrivilegesRequired(s) - } - return err -} - -// CVSS31ModifiedScope represents the modifiedScopeType in CVSS31. -type CVSS31ModifiedScope string - -const ( - // CVSS31ModifiedScopeUnchanged is a constant for "UNCHANGED". - CVSS31ModifiedScopeUnchanged CVSS31ModifiedScope = "UNCHANGED" - // CVSS31ModifiedScopeChanged is a constant for "CHANGED". - CVSS31ModifiedScopeChanged CVSS31ModifiedScope = "CHANGED" - // CVSS31ModifiedScopeNotDefined is a constant for "NOT_DEFINED". - CVSS31ModifiedScopeNotDefined CVSS31ModifiedScope = "NOT_DEFINED" -) - -var cvss31ModifiedScopePattern = alternativesUnmarshal( - string(CVSS31ModifiedScopeUnchanged), - string(CVSS31ModifiedScopeChanged), - string(CVSS31ModifiedScopeNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ModifiedScope) UnmarshalText(data []byte) error { - s, err := cvss31ModifiedScopePattern(data) - if err == nil { - *e = CVSS31ModifiedScope(s) - } - return err -} - -// CVSS31ModifiedUserInteraction represents the modifiedUserInteractionType in CVSS31. -type CVSS31ModifiedUserInteraction string - -const ( - // CVSS31ModifiedUserInteractionNone is a constant for "NONE". - CVSS31ModifiedUserInteractionNone CVSS31ModifiedUserInteraction = "NONE" - // CVSS31ModifiedUserInteractionRequired is a constant for "REQUIRED". - CVSS31ModifiedUserInteractionRequired CVSS31ModifiedUserInteraction = "REQUIRED" - // CVSS31ModifiedUserInteractionNotDefined is a constant for "NOT_DEFINED". 
- CVSS31ModifiedUserInteractionNotDefined CVSS31ModifiedUserInteraction = "NOT_DEFINED" -) - -var cvss31ModifiedUserInteractionPattern = alternativesUnmarshal( - string(CVSS31ModifiedUserInteractionNone), - string(CVSS31ModifiedUserInteractionRequired), - string(CVSS31ModifiedUserInteractionNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31ModifiedUserInteraction) UnmarshalText(data []byte) error { - s, err := cvss31ModifiedUserInteractionPattern(data) - if err == nil { - *e = CVSS31ModifiedUserInteraction(s) - } - return err -} - -// CVSS31PrivilegesRequired represents the privilegesRequiredType in CVSS31. -type CVSS31PrivilegesRequired string - -const ( - // CVSS31PrivilegesRequiredHigh is a constant for "HIGH". - CVSS31PrivilegesRequiredHigh CVSS31PrivilegesRequired = "HIGH" - // CVSS31PrivilegesRequiredLow is a constant for "LOW". - CVSS31PrivilegesRequiredLow CVSS31PrivilegesRequired = "LOW" - // CVSS31PrivilegesRequiredNone is a constant for "NONE". - CVSS31PrivilegesRequiredNone CVSS31PrivilegesRequired = "NONE" -) - -var cvss31PrivilegesRequiredPattern = alternativesUnmarshal( - string(CVSS31PrivilegesRequiredHigh), - string(CVSS31PrivilegesRequiredLow), - string(CVSS31PrivilegesRequiredNone), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31PrivilegesRequired) UnmarshalText(data []byte) error { - s, err := cvss31PrivilegesRequiredPattern(data) - if err == nil { - *e = CVSS31PrivilegesRequired(s) - } - return err -} - -// CVSS31RemediationLevel represents the remediationLevelType in CVSS31. -type CVSS31RemediationLevel string - -const ( - // CVSS31RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX". - CVSS31RemediationLevelOfficialFix CVSS31RemediationLevel = "OFFICIAL_FIX" - // CVSS31RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX". - CVSS31RemediationLevelTemporaryFix CVSS31RemediationLevel = "TEMPORARY_FIX" - // CVSS31RemediationLevelWorkaround is a constant for "WORKAROUND". - CVSS31RemediationLevelWorkaround CVSS31RemediationLevel = "WORKAROUND" - // CVSS31RemediationLevelUnavailable is a constant for "UNAVAILABLE". - CVSS31RemediationLevelUnavailable CVSS31RemediationLevel = "UNAVAILABLE" - // CVSS31RemediationLevelNotDefined is a constant for "NOT_DEFINED". - CVSS31RemediationLevelNotDefined CVSS31RemediationLevel = "NOT_DEFINED" -) - -var cvss31RemediationLevelPattern = alternativesUnmarshal( - string(CVSS31RemediationLevelOfficialFix), - string(CVSS31RemediationLevelTemporaryFix), - string(CVSS31RemediationLevelWorkaround), - string(CVSS31RemediationLevelUnavailable), - string(CVSS31RemediationLevelNotDefined), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31RemediationLevel) UnmarshalText(data []byte) error { - s, err := cvss31RemediationLevelPattern(data) - if err == nil { - *e = CVSS31RemediationLevel(s) - } - return err -} - -// CVSS31Scope represents the scopeType in CVSS31. -type CVSS31Scope string - -const ( - // CVSS31ScopeUnchanged is a constant for "UNCHANGED". - CVSS31ScopeUnchanged CVSS31Scope = "UNCHANGED" - // CVSS31ScopeChanged is a constant for "CHANGED". - CVSS31ScopeChanged CVSS31Scope = "CHANGED" -) - -var cvss31ScopePattern = alternativesUnmarshal( - string(CVSS31ScopeUnchanged), - string(CVSS31ScopeChanged), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
-func (e *CVSS31Scope) UnmarshalText(data []byte) error { - s, err := cvss31ScopePattern(data) - if err == nil { - *e = CVSS31Scope(s) - } - return err -} - -// CVSS31Severity represents the severityType in CVSS31. -type CVSS31Severity string - -const ( - // CVSS31SeverityNone is a constant for "NONE". - CVSS31SeverityNone CVSS31Severity = "NONE" - // CVSS31SeverityLow is a constant for "LOW". - CVSS31SeverityLow CVSS31Severity = "LOW" - // CVSS31SeverityMedium is a constant for "MEDIUM". - CVSS31SeverityMedium CVSS31Severity = "MEDIUM" - // CVSS31SeverityHigh is a constant for "HIGH". - CVSS31SeverityHigh CVSS31Severity = "HIGH" - // CVSS31SeverityCritical is a constant for "CRITICAL". - CVSS31SeverityCritical CVSS31Severity = "CRITICAL" -) - -var cvss31SeverityPattern = alternativesUnmarshal( - string(CVSS31SeverityNone), - string(CVSS31SeverityLow), - string(CVSS31SeverityMedium), - string(CVSS31SeverityHigh), - string(CVSS31SeverityCritical), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31Severity) UnmarshalText(data []byte) error { - s, err := cvss31SeverityPattern(data) - if err == nil { - *e = CVSS31Severity(s) - } - return err -} - -// CVSS31UserInteraction represents the userInteractionType in CVSS31. -type CVSS31UserInteraction string - -const ( - // CVSS31UserInteractionNone is a constant for "NONE". - CVSS31UserInteractionNone CVSS31UserInteraction = "NONE" - // CVSS31UserInteractionRequired is a constant for "REQUIRED". - CVSS31UserInteractionRequired CVSS31UserInteraction = "REQUIRED" -) - -var cvss31UserInteractionPattern = alternativesUnmarshal( - string(CVSS31UserInteractionNone), - string(CVSS31UserInteractionRequired), -) - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (e *CVSS31UserInteraction) UnmarshalText(data []byte) error { - s, err := cvss31UserInteractionPattern(data) - if err == nil { - *e = CVSS31UserInteraction(s) - } - return err -} diff --git a/csaf/cvss3enums.go b/csaf/cvss3enums.go new file mode 100644 index 0000000..494a46c --- /dev/null +++ b/csaf/cvss3enums.go @@ -0,0 +1,500 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH +// +// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! + +package csaf + +// CVSS3AttackComplexity represents the attackComplexityType in CVSS3. +type CVSS3AttackComplexity string + +const ( + // CVSS3AttackComplexityHigh is a constant for "HIGH". + CVSS3AttackComplexityHigh CVSS3AttackComplexity = "HIGH" + // CVSS3AttackComplexityLow is a constant for "LOW". + CVSS3AttackComplexityLow CVSS3AttackComplexity = "LOW" +) + +var cvss3AttackComplexityPattern = alternativesUnmarshal( + string(CVSS3AttackComplexityHigh), + string(CVSS3AttackComplexityLow), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3AttackComplexity) UnmarshalText(data []byte) error { + s, err := cvss3AttackComplexityPattern(data) + if err == nil { + *e = CVSS3AttackComplexity(s) + } + return err +} + +// CVSS3AttackVector represents the attackVectorType in CVSS3. +type CVSS3AttackVector string + +const ( + // CVSS3AttackVectorNetwork is a constant for "NETWORK". + CVSS3AttackVectorNetwork CVSS3AttackVector = "NETWORK" + // CVSS3AttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". 
+ CVSS3AttackVectorAdjacentNetwork CVSS3AttackVector = "ADJACENT_NETWORK" + // CVSS3AttackVectorLocal is a constant for "LOCAL". + CVSS3AttackVectorLocal CVSS3AttackVector = "LOCAL" + // CVSS3AttackVectorPhysical is a constant for "PHYSICAL". + CVSS3AttackVectorPhysical CVSS3AttackVector = "PHYSICAL" +) + +var cvss3AttackVectorPattern = alternativesUnmarshal( + string(CVSS3AttackVectorNetwork), + string(CVSS3AttackVectorAdjacentNetwork), + string(CVSS3AttackVectorLocal), + string(CVSS3AttackVectorPhysical), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3AttackVector) UnmarshalText(data []byte) error { + s, err := cvss3AttackVectorPattern(data) + if err == nil { + *e = CVSS3AttackVector(s) + } + return err +} + +// CVSS3CiaRequirement represents the ciaRequirementType in CVSS3. +type CVSS3CiaRequirement string + +const ( + // CVSS3CiaRequirementLow is a constant for "LOW". + CVSS3CiaRequirementLow CVSS3CiaRequirement = "LOW" + // CVSS3CiaRequirementMedium is a constant for "MEDIUM". + CVSS3CiaRequirementMedium CVSS3CiaRequirement = "MEDIUM" + // CVSS3CiaRequirementHigh is a constant for "HIGH". + CVSS3CiaRequirementHigh CVSS3CiaRequirement = "HIGH" + // CVSS3CiaRequirementNotDefined is a constant for "NOT_DEFINED". + CVSS3CiaRequirementNotDefined CVSS3CiaRequirement = "NOT_DEFINED" +) + +var cvss3CiaRequirementPattern = alternativesUnmarshal( + string(CVSS3CiaRequirementLow), + string(CVSS3CiaRequirementMedium), + string(CVSS3CiaRequirementHigh), + string(CVSS3CiaRequirementNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3CiaRequirement) UnmarshalText(data []byte) error { + s, err := cvss3CiaRequirementPattern(data) + if err == nil { + *e = CVSS3CiaRequirement(s) + } + return err +} + +// CVSS3Cia represents the ciaType in CVSS3. +type CVSS3Cia string + +const ( + // CVSS3CiaNone is a constant for "NONE". + CVSS3CiaNone CVSS3Cia = "NONE" + // CVSS3CiaLow is a constant for "LOW". + CVSS3CiaLow CVSS3Cia = "LOW" + // CVSS3CiaHigh is a constant for "HIGH". + CVSS3CiaHigh CVSS3Cia = "HIGH" +) + +var cvss3CiaPattern = alternativesUnmarshal( + string(CVSS3CiaNone), + string(CVSS3CiaLow), + string(CVSS3CiaHigh), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3Cia) UnmarshalText(data []byte) error { + s, err := cvss3CiaPattern(data) + if err == nil { + *e = CVSS3Cia(s) + } + return err +} + +// CVSS3Confidence represents the confidenceType in CVSS3. +type CVSS3Confidence string + +const ( + // CVSS3ConfidenceUnknown is a constant for "UNKNOWN". + CVSS3ConfidenceUnknown CVSS3Confidence = "UNKNOWN" + // CVSS3ConfidenceReasonable is a constant for "REASONABLE". + CVSS3ConfidenceReasonable CVSS3Confidence = "REASONABLE" + // CVSS3ConfidenceConfirmed is a constant for "CONFIRMED". + CVSS3ConfidenceConfirmed CVSS3Confidence = "CONFIRMED" + // CVSS3ConfidenceNotDefined is a constant for "NOT_DEFINED". + CVSS3ConfidenceNotDefined CVSS3Confidence = "NOT_DEFINED" +) + +var cvss3ConfidencePattern = alternativesUnmarshal( + string(CVSS3ConfidenceUnknown), + string(CVSS3ConfidenceReasonable), + string(CVSS3ConfidenceConfirmed), + string(CVSS3ConfidenceNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
+func (e *CVSS3Confidence) UnmarshalText(data []byte) error { + s, err := cvss3ConfidencePattern(data) + if err == nil { + *e = CVSS3Confidence(s) + } + return err +} + +// CVSS3ExploitCodeMaturity represents the exploitCodeMaturityType in CVSS3. +type CVSS3ExploitCodeMaturity string + +const ( + // CVSS3ExploitCodeMaturityUnproven is a constant for "UNPROVEN". + CVSS3ExploitCodeMaturityUnproven CVSS3ExploitCodeMaturity = "UNPROVEN" + // CVSS3ExploitCodeMaturityProofOfConcept is a constant for "PROOF_OF_CONCEPT". + CVSS3ExploitCodeMaturityProofOfConcept CVSS3ExploitCodeMaturity = "PROOF_OF_CONCEPT" + // CVSS3ExploitCodeMaturityFunctional is a constant for "FUNCTIONAL". + CVSS3ExploitCodeMaturityFunctional CVSS3ExploitCodeMaturity = "FUNCTIONAL" + // CVSS3ExploitCodeMaturityHigh is a constant for "HIGH". + CVSS3ExploitCodeMaturityHigh CVSS3ExploitCodeMaturity = "HIGH" + // CVSS3ExploitCodeMaturityNotDefined is a constant for "NOT_DEFINED". + CVSS3ExploitCodeMaturityNotDefined CVSS3ExploitCodeMaturity = "NOT_DEFINED" +) + +var cvss3ExploitCodeMaturityPattern = alternativesUnmarshal( + string(CVSS3ExploitCodeMaturityUnproven), + string(CVSS3ExploitCodeMaturityProofOfConcept), + string(CVSS3ExploitCodeMaturityFunctional), + string(CVSS3ExploitCodeMaturityHigh), + string(CVSS3ExploitCodeMaturityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ExploitCodeMaturity) UnmarshalText(data []byte) error { + s, err := cvss3ExploitCodeMaturityPattern(data) + if err == nil { + *e = CVSS3ExploitCodeMaturity(s) + } + return err +} + +// CVSS3ModifiedAttackComplexity represents the modifiedAttackComplexityType in CVSS3. +type CVSS3ModifiedAttackComplexity string + +const ( + // CVSS3ModifiedAttackComplexityHigh is a constant for "HIGH". + CVSS3ModifiedAttackComplexityHigh CVSS3ModifiedAttackComplexity = "HIGH" + // CVSS3ModifiedAttackComplexityLow is a constant for "LOW". + CVSS3ModifiedAttackComplexityLow CVSS3ModifiedAttackComplexity = "LOW" + // CVSS3ModifiedAttackComplexityNotDefined is a constant for "NOT_DEFINED". + CVSS3ModifiedAttackComplexityNotDefined CVSS3ModifiedAttackComplexity = "NOT_DEFINED" +) + +var cvss3ModifiedAttackComplexityPattern = alternativesUnmarshal( + string(CVSS3ModifiedAttackComplexityHigh), + string(CVSS3ModifiedAttackComplexityLow), + string(CVSS3ModifiedAttackComplexityNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ModifiedAttackComplexity) UnmarshalText(data []byte) error { + s, err := cvss3ModifiedAttackComplexityPattern(data) + if err == nil { + *e = CVSS3ModifiedAttackComplexity(s) + } + return err +} + +// CVSS3ModifiedAttackVector represents the modifiedAttackVectorType in CVSS3. +type CVSS3ModifiedAttackVector string + +const ( + // CVSS3ModifiedAttackVectorNetwork is a constant for "NETWORK". + CVSS3ModifiedAttackVectorNetwork CVSS3ModifiedAttackVector = "NETWORK" + // CVSS3ModifiedAttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK". + CVSS3ModifiedAttackVectorAdjacentNetwork CVSS3ModifiedAttackVector = "ADJACENT_NETWORK" + // CVSS3ModifiedAttackVectorLocal is a constant for "LOCAL". + CVSS3ModifiedAttackVectorLocal CVSS3ModifiedAttackVector = "LOCAL" + // CVSS3ModifiedAttackVectorPhysical is a constant for "PHYSICAL". + CVSS3ModifiedAttackVectorPhysical CVSS3ModifiedAttackVector = "PHYSICAL" + // CVSS3ModifiedAttackVectorNotDefined is a constant for "NOT_DEFINED". 
+ CVSS3ModifiedAttackVectorNotDefined CVSS3ModifiedAttackVector = "NOT_DEFINED" +) + +var cvss3ModifiedAttackVectorPattern = alternativesUnmarshal( + string(CVSS3ModifiedAttackVectorNetwork), + string(CVSS3ModifiedAttackVectorAdjacentNetwork), + string(CVSS3ModifiedAttackVectorLocal), + string(CVSS3ModifiedAttackVectorPhysical), + string(CVSS3ModifiedAttackVectorNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ModifiedAttackVector) UnmarshalText(data []byte) error { + s, err := cvss3ModifiedAttackVectorPattern(data) + if err == nil { + *e = CVSS3ModifiedAttackVector(s) + } + return err +} + +// CVSS3ModifiedCia represents the modifiedCiaType in CVSS3. +type CVSS3ModifiedCia string + +const ( + // CVSS3ModifiedCiaNone is a constant for "NONE". + CVSS3ModifiedCiaNone CVSS3ModifiedCia = "NONE" + // CVSS3ModifiedCiaLow is a constant for "LOW". + CVSS3ModifiedCiaLow CVSS3ModifiedCia = "LOW" + // CVSS3ModifiedCiaHigh is a constant for "HIGH". + CVSS3ModifiedCiaHigh CVSS3ModifiedCia = "HIGH" + // CVSS3ModifiedCiaNotDefined is a constant for "NOT_DEFINED". + CVSS3ModifiedCiaNotDefined CVSS3ModifiedCia = "NOT_DEFINED" +) + +var cvss3ModifiedCiaPattern = alternativesUnmarshal( + string(CVSS3ModifiedCiaNone), + string(CVSS3ModifiedCiaLow), + string(CVSS3ModifiedCiaHigh), + string(CVSS3ModifiedCiaNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ModifiedCia) UnmarshalText(data []byte) error { + s, err := cvss3ModifiedCiaPattern(data) + if err == nil { + *e = CVSS3ModifiedCia(s) + } + return err +} + +// CVSS3ModifiedPrivilegesRequired represents the modifiedPrivilegesRequiredType in CVSS3. +type CVSS3ModifiedPrivilegesRequired string + +const ( + // CVSS3ModifiedPrivilegesRequiredHigh is a constant for "HIGH". + CVSS3ModifiedPrivilegesRequiredHigh CVSS3ModifiedPrivilegesRequired = "HIGH" + // CVSS3ModifiedPrivilegesRequiredLow is a constant for "LOW". + CVSS3ModifiedPrivilegesRequiredLow CVSS3ModifiedPrivilegesRequired = "LOW" + // CVSS3ModifiedPrivilegesRequiredNone is a constant for "NONE". + CVSS3ModifiedPrivilegesRequiredNone CVSS3ModifiedPrivilegesRequired = "NONE" + // CVSS3ModifiedPrivilegesRequiredNotDefined is a constant for "NOT_DEFINED". + CVSS3ModifiedPrivilegesRequiredNotDefined CVSS3ModifiedPrivilegesRequired = "NOT_DEFINED" +) + +var cvss3ModifiedPrivilegesRequiredPattern = alternativesUnmarshal( + string(CVSS3ModifiedPrivilegesRequiredHigh), + string(CVSS3ModifiedPrivilegesRequiredLow), + string(CVSS3ModifiedPrivilegesRequiredNone), + string(CVSS3ModifiedPrivilegesRequiredNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ModifiedPrivilegesRequired) UnmarshalText(data []byte) error { + s, err := cvss3ModifiedPrivilegesRequiredPattern(data) + if err == nil { + *e = CVSS3ModifiedPrivilegesRequired(s) + } + return err +} + +// CVSS3ModifiedScope represents the modifiedScopeType in CVSS3. +type CVSS3ModifiedScope string + +const ( + // CVSS3ModifiedScopeUnchanged is a constant for "UNCHANGED". + CVSS3ModifiedScopeUnchanged CVSS3ModifiedScope = "UNCHANGED" + // CVSS3ModifiedScopeChanged is a constant for "CHANGED". + CVSS3ModifiedScopeChanged CVSS3ModifiedScope = "CHANGED" + // CVSS3ModifiedScopeNotDefined is a constant for "NOT_DEFINED". 
+ CVSS3ModifiedScopeNotDefined CVSS3ModifiedScope = "NOT_DEFINED" +) + +var cvss3ModifiedScopePattern = alternativesUnmarshal( + string(CVSS3ModifiedScopeUnchanged), + string(CVSS3ModifiedScopeChanged), + string(CVSS3ModifiedScopeNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ModifiedScope) UnmarshalText(data []byte) error { + s, err := cvss3ModifiedScopePattern(data) + if err == nil { + *e = CVSS3ModifiedScope(s) + } + return err +} + +// CVSS3ModifiedUserInteraction represents the modifiedUserInteractionType in CVSS3. +type CVSS3ModifiedUserInteraction string + +const ( + // CVSS3ModifiedUserInteractionNone is a constant for "NONE". + CVSS3ModifiedUserInteractionNone CVSS3ModifiedUserInteraction = "NONE" + // CVSS3ModifiedUserInteractionRequired is a constant for "REQUIRED". + CVSS3ModifiedUserInteractionRequired CVSS3ModifiedUserInteraction = "REQUIRED" + // CVSS3ModifiedUserInteractionNotDefined is a constant for "NOT_DEFINED". + CVSS3ModifiedUserInteractionNotDefined CVSS3ModifiedUserInteraction = "NOT_DEFINED" +) + +var cvss3ModifiedUserInteractionPattern = alternativesUnmarshal( + string(CVSS3ModifiedUserInteractionNone), + string(CVSS3ModifiedUserInteractionRequired), + string(CVSS3ModifiedUserInteractionNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3ModifiedUserInteraction) UnmarshalText(data []byte) error { + s, err := cvss3ModifiedUserInteractionPattern(data) + if err == nil { + *e = CVSS3ModifiedUserInteraction(s) + } + return err +} + +// CVSS3PrivilegesRequired represents the privilegesRequiredType in CVSS3. +type CVSS3PrivilegesRequired string + +const ( + // CVSS3PrivilegesRequiredHigh is a constant for "HIGH". + CVSS3PrivilegesRequiredHigh CVSS3PrivilegesRequired = "HIGH" + // CVSS3PrivilegesRequiredLow is a constant for "LOW". + CVSS3PrivilegesRequiredLow CVSS3PrivilegesRequired = "LOW" + // CVSS3PrivilegesRequiredNone is a constant for "NONE". + CVSS3PrivilegesRequiredNone CVSS3PrivilegesRequired = "NONE" +) + +var cvss3PrivilegesRequiredPattern = alternativesUnmarshal( + string(CVSS3PrivilegesRequiredHigh), + string(CVSS3PrivilegesRequiredLow), + string(CVSS3PrivilegesRequiredNone), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3PrivilegesRequired) UnmarshalText(data []byte) error { + s, err := cvss3PrivilegesRequiredPattern(data) + if err == nil { + *e = CVSS3PrivilegesRequired(s) + } + return err +} + +// CVSS3RemediationLevel represents the remediationLevelType in CVSS3. +type CVSS3RemediationLevel string + +const ( + // CVSS3RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX". + CVSS3RemediationLevelOfficialFix CVSS3RemediationLevel = "OFFICIAL_FIX" + // CVSS3RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX". + CVSS3RemediationLevelTemporaryFix CVSS3RemediationLevel = "TEMPORARY_FIX" + // CVSS3RemediationLevelWorkaround is a constant for "WORKAROUND". + CVSS3RemediationLevelWorkaround CVSS3RemediationLevel = "WORKAROUND" + // CVSS3RemediationLevelUnavailable is a constant for "UNAVAILABLE". + CVSS3RemediationLevelUnavailable CVSS3RemediationLevel = "UNAVAILABLE" + // CVSS3RemediationLevelNotDefined is a constant for "NOT_DEFINED". 
+ CVSS3RemediationLevelNotDefined CVSS3RemediationLevel = "NOT_DEFINED" +) + +var cvss3RemediationLevelPattern = alternativesUnmarshal( + string(CVSS3RemediationLevelOfficialFix), + string(CVSS3RemediationLevelTemporaryFix), + string(CVSS3RemediationLevelWorkaround), + string(CVSS3RemediationLevelUnavailable), + string(CVSS3RemediationLevelNotDefined), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3RemediationLevel) UnmarshalText(data []byte) error { + s, err := cvss3RemediationLevelPattern(data) + if err == nil { + *e = CVSS3RemediationLevel(s) + } + return err +} + +// CVSS3Scope represents the scopeType in CVSS3. +type CVSS3Scope string + +const ( + // CVSS3ScopeUnchanged is a constant for "UNCHANGED". + CVSS3ScopeUnchanged CVSS3Scope = "UNCHANGED" + // CVSS3ScopeChanged is a constant for "CHANGED". + CVSS3ScopeChanged CVSS3Scope = "CHANGED" +) + +var cvss3ScopePattern = alternativesUnmarshal( + string(CVSS3ScopeUnchanged), + string(CVSS3ScopeChanged), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3Scope) UnmarshalText(data []byte) error { + s, err := cvss3ScopePattern(data) + if err == nil { + *e = CVSS3Scope(s) + } + return err +} + +// CVSS3Severity represents the severityType in CVSS3. +type CVSS3Severity string + +const ( + // CVSS3SeverityNone is a constant for "NONE". + CVSS3SeverityNone CVSS3Severity = "NONE" + // CVSS3SeverityLow is a constant for "LOW". + CVSS3SeverityLow CVSS3Severity = "LOW" + // CVSS3SeverityMedium is a constant for "MEDIUM". + CVSS3SeverityMedium CVSS3Severity = "MEDIUM" + // CVSS3SeverityHigh is a constant for "HIGH". + CVSS3SeverityHigh CVSS3Severity = "HIGH" + // CVSS3SeverityCritical is a constant for "CRITICAL". + CVSS3SeverityCritical CVSS3Severity = "CRITICAL" +) + +var cvss3SeverityPattern = alternativesUnmarshal( + string(CVSS3SeverityNone), + string(CVSS3SeverityLow), + string(CVSS3SeverityMedium), + string(CVSS3SeverityHigh), + string(CVSS3SeverityCritical), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (e *CVSS3Severity) UnmarshalText(data []byte) error { + s, err := cvss3SeverityPattern(data) + if err == nil { + *e = CVSS3Severity(s) + } + return err +} + +// CVSS3UserInteraction represents the userInteractionType in CVSS3. +type CVSS3UserInteraction string + +const ( + // CVSS3UserInteractionNone is a constant for "NONE". + CVSS3UserInteractionNone CVSS3UserInteraction = "NONE" + // CVSS3UserInteractionRequired is a constant for "REQUIRED". + CVSS3UserInteractionRequired CVSS3UserInteraction = "REQUIRED" +) + +var cvss3UserInteractionPattern = alternativesUnmarshal( + string(CVSS3UserInteractionNone), + string(CVSS3UserInteractionRequired), +) + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. 
+func (e *CVSS3UserInteraction) UnmarshalText(data []byte) error { + s, err := cvss3UserInteractionPattern(data) + if err == nil { + *e = CVSS3UserInteraction(s) + } + return err +} diff --git a/csaf/doc.go b/csaf/doc.go index 92d8ee3..22ff7fa 100644 --- a/csaf/doc.go +++ b/csaf/doc.go @@ -10,5 +10,6 @@ package csaf //go:generate go run ./generate_cvss_enums.go -o cvss20enums.go -i ./schema/cvss-v2.0.json -p CVSS20 -//go:generate go run ./generate_cvss_enums.go -o cvss30enums.go -i ./schema/cvss-v3.0.json -p CVSS30 -//go:generate go run ./generate_cvss_enums.go -o cvss31enums.go -i ./schema/cvss-v3.1.json -p CVSS31 +// Generating only enums for CVSS 3.0 and not for 3.1 since the enums of both of them +// are identical. +//go:generate go run ./generate_cvss_enums.go -o cvss3enums.go -i ./schema/cvss-v3.0.json -p CVSS3 From dc41aae07f08f96365dcbef6825c45d4b6692db5 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Thu, 7 Sep 2023 08:48:34 +0200 Subject: [PATCH 012/235] use up-to-date schema for CVSS 3.0 --- csaf/schema/cvss-v3.0.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/schema/cvss-v3.0.json b/csaf/schema/cvss-v3.0.json index 28b3c38..af09ec6 100644 --- a/csaf/schema/cvss-v3.0.json +++ b/csaf/schema/cvss-v3.0.json @@ -108,7 +108,7 @@ }, "vectorString": { "type": "string", - "pattern": "^CVSS:3[.]0/((AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XUNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XUNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$" + "pattern": "^CVSS:3[.]0/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$" }, "attackVector": { "$ref": "#/definitions/attackVectorType" }, "attackComplexity": { "$ref": "#/definitions/attackComplexityType" }, From 5a3661e81bca55e09bb1352d94451f95519167ea Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Fri, 8 Sep 2023 14:52:48 +0200 Subject: [PATCH 013/235] use type FileHashValue --- csaf/advisory.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 1fb8a5d..c656ccb 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -83,8 +83,8 @@ var fileHashValuePattern = patternUnmarshal(`^[0-9a-fA-F]{32,}$`) // FileHash is checksum hash. // Values for 'algorithm' are derived from the currently supported digests OpenSSL. Leading dashes were removed. type FileHash struct { - Algorithm string `json:"algorithm"` // required, default: sha256 - Value string `json:"value"` // required + Algorithm string `json:"algorithm"` // required, default: sha256 + Value FileHashValue `json:"value"` // required } // Hashes is a list of hashes. 
From f868b13c24a5e61fd85a5148fda0a2b4a7a68877 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Fri, 8 Sep 2023 16:24:50 +0200 Subject: [PATCH 014/235] added function to validate document --- csaf/advisory.go | 284 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 210 insertions(+), 74 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index c656ccb..d00f997 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -10,6 +10,7 @@ package csaf import ( "encoding/json" + "fmt" "io" "os" ) @@ -83,14 +84,14 @@ var fileHashValuePattern = patternUnmarshal(`^[0-9a-fA-F]{32,}$`) // FileHash is checksum hash. // Values for 'algorithm' are derived from the currently supported digests OpenSSL. Leading dashes were removed. type FileHash struct { - Algorithm string `json:"algorithm"` // required, default: sha256 - Value FileHashValue `json:"value"` // required + Algorithm *string `json:"algorithm"` // required, default: sha256 + Value *FileHashValue `json:"value"` // required } // Hashes is a list of hashes. type Hashes struct { - FileHashes []FileHash `json:"file_hashes"` // required - FileName string `json:"filename"` // required + FileHashes []*FileHash `json:"file_hashes"` // required + FileName *string `json:"filename"` // required } // CPE represents a Common Platform Enumeration in an advisory. @@ -105,8 +106,8 @@ var pURLPattern = patternUnmarshal(`^pkg:[A-Za-z\\.\\-\\+][A-Za-z0-9\\.\\-\\+]*/ // XGenericURI represents an identifier for a product. type XGenericURI struct { - Namespace string `json:"namespace"` // required - URI string `json:"uri"` // required + Namespace *string `json:"namespace"` // required + URI *string `json:"uri"` // required } // ProductIdentificationHelper bundles product identifier information. @@ -124,8 +125,8 @@ type ProductIdentificationHelper struct { // FullProductName is the full name of a product. type FullProductName struct { - Name string `json:"name"` // required - ProductID ProductID `json:"product_id"` // required + Name *string `json:"name"` // required + ProductID *ProductID `json:"product_id"` // required ProductIdentificationHelper *ProductIdentificationHelper `json:"product_identification_helper,omitempty"` } @@ -137,8 +138,8 @@ type FullProductName struct { // version ranges. type Branch struct { Branches []*Branch `json:"branches,omitempty"` - Category BranchCategory `json:"category"` // required - Name string `json:"name"` // required + Category *BranchCategory `json:"category"` // required + Name *string `json:"name"` // required Product *FullProductName `json:"product,omitempty"` } @@ -173,10 +174,10 @@ var csafNoteCategoryPattern = alternativesUnmarshal( // Note reflects the 'Note' object of an advisory. type Note struct { - Audience string `json:"audience,omitempty"` + Audience *string `json:"audience,omitempty"` NoteCategory *NoteCategory `json:"category"` // required Text *string `json:"text"` // required - Title string `json:"title,omitempty"` + Title *string `json:"title,omitempty"` } // ReferenceCategory is the category of a note. @@ -198,15 +199,15 @@ var csafReferenceCategoryPattern = alternativesUnmarshal( // or the entire document and to be of value to the document consumer. 
type Reference struct { ReferenceCategory *string `json:"category"` // optional, default: external - Summary string `json:"summary"` // required - URL string `json:"url"` // required + Summary *string `json:"summary"` // required + URL *string `json:"url"` // required } // AggregateSeverity stands for the urgency with which the vulnerabilities of an advisory // (not a specific one) should be addressed. type AggregateSeverity struct { Namespace *string `json:"namespace,omitempty"` - Text string `json:"text"` // required + Text *string `json:"text"` // required } // DocumentCategory represents a category of a document. @@ -224,8 +225,8 @@ var csafVersionPattern = alternativesUnmarshal(string(CSAFVersion20)) // TLP provides details about the TLP classification of the document. type TLP struct { - DocumentTLPLabel TLPLabel `json:"label"` // required - URL *string `json:"url,omitempty"` + DocumentTLPLabel *TLPLabel `json:"label"` // required + URL *string `json:"url,omitempty"` } // DocumentDistribution describes rules for sharing a document. @@ -236,11 +237,11 @@ type DocumentDistribution struct { // DocumentPublisher provides information about the publishing entity. type DocumentPublisher struct { - Category Category `json:"category"` // required - ContactDetails *string `json:"contact_details,omitempty"` - IssuingAuthority *string `json:"issuing_authority,omitempty"` - Name string `json:"name"` // required - Namespace string `json:"namespace"` // required + Category *Category `json:"category"` // required + ContactDetails *string `json:"contact_details,omitempty"` + IssuingAuthority *string `json:"issuing_authority,omitempty"` + Name *string `json:"name"` // required + Namespace *string `json:"namespace"` // required } // RevisionNumber specifies a version string to denote clearly the evolution of the content of the document. @@ -250,7 +251,7 @@ var versionPattern = patternUnmarshal("^(0|[1-9][0-9]*)$|^((0|[1-9]\\d*)\\.(0|[1 // Engine contains information about the engine that generated the CSAF document. type Engine struct { - Name string `json:"name"` // required + Name *string `json:"name"` // required Version *string `json:"version,omitempty"` } @@ -259,7 +260,7 @@ type Engine struct { // including the date it was generated and the entity that generated it. type Generator struct { Date *string `json:"date,omitempty"` - Engine Engine `json:"engine"` // required + Engine *Engine `json:"engine"` // required } // TrackingID is a unique identifier for the document. @@ -269,10 +270,10 @@ var trackingIDPattern = patternUnmarshal("^[\\S](.*[\\S])?$") // Revision contains information about one revision of the document. type Revision struct { - Date string `json:"date"` // required - LegacyVersion *string `json:"legacy_version,omitempty"` - Number RevisionNumber `json:"number"` // required - Summary string `json:"summary"` // required + Date *string `json:"date"` // required + LegacyVersion *string `json:"legacy_version,omitempty"` + Number *RevisionNumber `json:"number"` // required + Summary *string `json:"summary"` // required } // TrackingStatus is the category of a publisher. @@ -294,14 +295,14 @@ var csafTrackingStatusPattern = alternativesUnmarshal( // Tracking holds information that is necessary to track a CSAF document. 
type Tracking struct { - Aliases []*string `json:"aliases,omitempty"` // unique elements - CurrentReleaseDate string `json:"current_release_date"` // required - Generator *Generator `json:"generator"` - ID TrackingID `json:"id"` // required - InitialReleaseDate string `json:"initial_release_date"` // required - RevisionHistory []Revision `json:"revision_history"` // required - Status TrackingStatus `json:"status"` // required - Version RevisionNumber `json:"version"` // required + Aliases []*string `json:"aliases,omitempty"` // unique elements + CurrentReleaseDate *string `json:"current_release_date"` // required + Generator *Generator `json:"generator"` + ID *TrackingID `json:"id"` // required + InitialReleaseDate *string `json:"initial_release_date"` // required + RevisionHistory []*Revision `json:"revision_history"` // required + Status *TrackingStatus `json:"status"` // required + Version *RevisionNumber `json:"version"` // required } // Lang is a language identifier, corresponding to IETF BCP 47 / RFC 5646. @@ -311,18 +312,18 @@ var langPattern = patternUnmarshal("^(([A-Za-z]{2,3}(-[A-Za-z]{3}(-[A-Za-z]{3}){ // Document contains meta-data about an advisory. type Document struct { - Acknowledgements []Acknowledgement `json:"acknowledgements,omitempty"` + Acknowledgements []*Acknowledgement `json:"acknowledgements,omitempty"` AggregateSeverity *AggregateSeverity `json:"aggregate_severity,omitempty"` - Category DocumentCategory `json:"category"` // required - CSAFVersion Version `json:"csaf_version"` // required + Category *DocumentCategory `json:"category"` // required + CSAFVersion *Version `json:"csaf_version"` // required Distribution *DocumentDistribution `json:"distribution,omitempty"` Lang *Lang `json:"lang,omitempty"` Notes []*Note `json:"notes,omitempty"` - Publisher DocumentPublisher `json:"publisher"` // required + Publisher *DocumentPublisher `json:"publisher"` // required References []*Reference `json:"references,omitempty"` SourceLang *Lang `json:"source_lang,omitempty"` - Title string `json:"title"` // required - Tracking Tracking `json:"tracking"` // required + Title *string `json:"title"` // required + Tracking *Tracking `json:"tracking"` // required } // ProductGroupID is a reference token for product group instances. @@ -330,9 +331,9 @@ type ProductGroupID string // ProductGroup is a group of products in the document that belong to one group. type ProductGroup struct { - GroupID string `json:"group_id"` // required - ProductIDs Products `json:"product_ids"` // required, two or more unique elements - Summary *string `json:"summary,omitempty"` + GroupID *string `json:"group_id"` // required + ProductIDs *Products `json:"product_ids"` // required, two or more unique elements + Summary *string `json:"summary,omitempty"` } // ProductGroups is a list of ProductGroupIDs @@ -365,10 +366,10 @@ var csafRelationshipCategoryPattern = alternativesUnmarshal( // Relationship establishes a link between two existing FullProductName elements. 
type Relationship struct { - Category RelationshipCategory `json:"category"` // required - FullProductName FullProductName `json:"full_product_name"` // required - ProductReference ProductID `json:"product_reference"` // required - RelatesToProductReference ProductID `json:"relates_to_product_reference"` // required + Category *RelationshipCategory `json:"category"` // required + FullProductName *FullProductName `json:"full_product_name"` // required + ProductReference *ProductID `json:"product_reference"` // required + RelatesToProductReference *ProductID `json:"relates_to_product_reference"` // required } @@ -392,8 +393,8 @@ var weaknessIDPattern = patternUnmarshal("^CWE-[1-9]\\d{0,5}$") // CWE holds the MITRE standard Common Weakness Enumeration (CWE) for the weakness associated. type CWE struct { - ID WeaknessID `json:"id"` // required - Name string `json:"name"` // required + ID *WeaknessID `json:"id"` // required + Name *string `json:"name"` // required } // FlagLabel is the label of a flag for a vulnerability. @@ -425,14 +426,14 @@ var csafFlagLabelPattern = alternativesUnmarshal( type Flag struct { Date *string `json:"date,omitempty"` GroupIds *ProductGroups `json:"group_ids,omitempty"` - Label FlagLabel `json:"label"` // required + Label *FlagLabel `json:"label"` // required ProductIds *Products `json:"product_ids,omitempty"` } // VulnerabilityID is the identifier of a vulnerability. type VulnerabilityID struct { - SystemName string `json:"system_name"` // required - Text string `json:"text"` // required + SystemName *string `json:"system_name"` // required + Text *string `json:"text"` // required } // InvolvementParty is the party of an involvement. @@ -490,10 +491,10 @@ var csafInvolvementStatusPattern = alternativesUnmarshal( // The ordered tuple of the values of party and date (if present) SHALL be unique within the involvements // of a vulnerability. type Involvement struct { - Date *string `json:"date,omitempty"` - Party InvolvementParty `json:"party"` // required - Status InvolvementStatus `json:"status"` // required - Summary *string `json:"summary,omitempty"` + Date *string `json:"date,omitempty"` + Party *InvolvementParty `json:"party"` // required + Status *InvolvementStatus `json:"status"` // required + Summary *string `json:"summary,omitempty"` } // ProductStatus contains different lists of ProductIDs which provide details on @@ -570,8 +571,8 @@ var csafRestartRequiredCategoryPattern = alternativesUnmarshal( // RestartRequired provides information on category of restart is required by this remediation to become // effective. type RestartRequired struct { - Category RestartRequiredCategory `json:"category"` // required - Details *string `json:"details,omitempty"` + Category *RestartRequiredCategory `json:"category"` // required + Details *string `json:"details,omitempty"` } // Remediation specifies details on how to handle (and presumably, fix) a vulnerability. 
@@ -621,15 +622,15 @@ var cvss3VectorStringPattern = patternUnmarshal(`^CVSS:3[.][01]/((AV:[NALP]|AC:[ // CVSS2 holding a CVSS v2.0 value type CVSS2 struct { - Version CVSSVersion2 `json:"version"` // required - VectorString CVSS2VectorString `json:"vectorString"` // required + Version *CVSSVersion2 `json:"version"` // required + VectorString *CVSS2VectorString `json:"vectorString"` // required AccessVector *CVSS20AccessVector `json:"accessVector,omitempty"` AccessComplexity *CVSS20AccessComplexity `json:"accessComplexity,omitempty"` Authentication *CVSS20Authentication `json:"authentication,omitempty"` ConfidentialityImpact *CVSS20Cia `json:"confidentialityImpact,omitempty"` IntegrityImpact *CVSS20Cia `json:"integrityImpact,omitempty"` AvailabilityImpact *CVSS20Cia `json:"availabilityImpact,omitempty"` - BaseScore float64 `json:"baseScore"` // required + BaseScore *float64 `json:"baseScore"` // required Exploitability *CVSS20Exploitability `json:"exploitability,omitempty"` RemediationLevel *CVSS20RemediationLevel `json:"remediationLevel,omitempty"` ReportConfidence *CVSS20ReportConfidence `json:"reportConfidence,omitempty"` @@ -644,8 +645,8 @@ type CVSS2 struct { // CVSS3 holding a CVSS v3.x value type CVSS3 struct { - Version CVSSVersion3 `json:"version"` // required - VectorString CVSS3VectorString `json:"vectorString"` // required + Version *CVSSVersion3 `json:"version"` // required + VectorString *CVSS3VectorString `json:"vectorString"` // required AttackVector *CVSS3AttackVector `json:"attackVector,omitempty"` AttackComplexity *CVSS3AttackComplexity `json:"attackComplexity,omitempty"` PrivilegesRequired *CVSS3PrivilegesRequired `json:"privilegesRequired,omitempty"` @@ -654,8 +655,8 @@ type CVSS3 struct { ConfidentialityImpact *CVSS3Cia `json:"confidentialityImpact,omitempty"` IntegrityImpact CVSS3Cia `json:"integrityImpact,omitempty"` AvailabilityImpact *CVSS3Cia `json:"availabilityImpact,omitempty"` - BaseScore float64 `json:"baseScore"` // required - BaseSeverity CVSS3Severity `json:"baseSeverity"` // required + BaseScore *float64 `json:"baseScore"` // required + BaseSeverity *CVSS3Severity `json:"baseSeverity"` // required ExploitCodeMaturity *CVSS3ExploitCodeMaturity `json:"exploitCodeMaturity,omitempty"` RemediationLevel *CVSS3RemediationLevel `json:"remediationLevel,omitempty"` ReportConfidence *CVSS3Confidence `json:"reportConfidence,omitempty"` @@ -703,11 +704,11 @@ var csafThreatCategoryPattern = alternativesUnmarshal( // Threat contains information about a vulnerability that can change with time. type Threat struct { - Category ThreatCategory `json:"category"` // required - Date *string `json:"date,omitempty"` - Details string `json:"details"` // required - GroupIds *ProductGroups `json:"group_ids,omitempty"` - ProductIds *Products `json:"product_ids,omitempty"` + Category *ThreatCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details *string `json:"details"` // required + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` } // Vulnerability contains all fields that are related to a single vulnerability in the document. @@ -731,15 +732,147 @@ type Vulnerability struct { // Advisory represents a CSAF advisory. 
type Advisory struct { - Document Document `json:"document"` // required + Document *Document `json:"document"` // required ProductTree *ProductTree `json:"product_tree,omitempty"` Vulnerabilities []*Vulnerability `json:"vulnerabilities,omitempty"` } +func (adv *Advisory) ValidateDocument() error { + doc := adv.Document + + if doc.AggregateSeverity != nil { + if doc.AggregateSeverity.Text == nil { + return fmt.Errorf("the property 'aggregate_severity' is missing the property 'text'") + } + } + + if doc.Category == nil { + return fmt.Errorf("the property 'document' is missing the property 'category'") + } + + if doc.CSAFVersion == nil { + return fmt.Errorf("the property 'document' is missing the property 'csaf_version'") + } + + if doc.Distribution != nil { + if doc.Distribution.Text == nil && doc.Distribution.TLP == nil { + return fmt.Errorf("the property 'distribution' must at least contain one of the following properties:" + + "'text', 'tlp'") + } + } + + if doc.Notes != nil { + for index, note := range doc.Notes { + if note.NoteCategory == nil { + return fmt.Errorf("the %d. note in the property 'document' is missing the property 'note_category'", index) + } + if note.Text == nil { + return fmt.Errorf("the %d. note in the property 'document' is missing the property 'text'", index) + } + } + } + + if doc.Publisher == nil { + return fmt.Errorf("the property 'document' is missing the property 'publisher'") + } + + publisher := doc.Publisher + + if publisher.Category == nil { + return fmt.Errorf("the publisher in the property 'document' is missing the property 'category'") + } + + if publisher.Name == nil { + return fmt.Errorf("the publisher in the property 'document' is missing the property 'name'") + } + + if publisher.Namespace == nil { + return fmt.Errorf("the publisher in the property 'document' is missing the property 'namespace'") + } + + if doc.References != nil { + for index, ref := range doc.References { + if ref.Summary == nil { + return fmt.Errorf("the %d. reference in the property 'document' is missing the property 'summary'", index) + } + if ref.URL == nil { + return fmt.Errorf("the %d. reference in the property 'document' is missing the property 'url'", index) + } + } + } + + if doc.Title == nil { + return fmt.Errorf("the property 'document' is missing the property 'title'") + } + + if doc.Tracking == nil { + return fmt.Errorf("the property 'document' is missing the property 'tracking'") + } + + tracking := doc.Tracking + + if tracking.CurrentReleaseDate == nil { + return fmt.Errorf("the property 'tracking' is missing the property 'current_release_date'") + } + + if tracking.Generator != nil { + generator := tracking.Generator + if generator.Engine == nil { + return fmt.Errorf("the property 'generator' is missing the property 'engine'") + } + + if generator.Engine.Version == nil { + return fmt.Errorf("the property 'engine' is missing the property 'version'") + } + } + + if tracking.ID == nil { + return fmt.Errorf("the property 'tracking' is missing the property 'id'") + } + + if tracking.InitialReleaseDate == nil { + return fmt.Errorf("the property 'tracking' is missing the property 'initial_release_date'") + } + + if tracking.RevisionHistory == nil { + return fmt.Errorf("the property 'tracking' is missing the property 'revision_history'") + } + + for index, revision := range tracking.RevisionHistory { + if revision.Date == nil { + return fmt.Errorf("the %d. 
revision in the property 'document' is missing the property 'date'", index) + } + + if revision.Number == nil { + return fmt.Errorf("the %d. revision in the property 'document' is missing the property 'number'", index) + } + + if revision.Summary == nil { + return fmt.Errorf("the %d. revision in the property 'document' is missing the property 'summary'", index) + } + } + + if tracking.Status == nil { + return fmt.Errorf("the property 'tracking' is missing the property 'status'") + } + + if tracking.Version == nil { + return fmt.Errorf("the property 'tracking' is missing the property 'version'") + } + + return nil +} + // Validate checks if the advisory is valid. // Returns an error if the validation fails otherwise nil. func (adv *Advisory) Validate() error { - // TODO + if adv.Document == nil { + return fmt.Errorf("the advisory is missing the property 'document'") + } + + if validationError := adv.ValidateDocument(); validationError != nil { + return validationError + } return nil } @@ -754,6 +887,9 @@ func LoadAdvisory(fname string) (*Advisory, error) { if err := json.NewDecoder(f).Decode(&advisory); err != nil { return nil, err } + if validationError := advisory.Validate(); validationError != nil { + return nil, validationError + } return &advisory, nil } From ed42f193d13551345ae1da83ee2daaefd9fb820c Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Fri, 8 Sep 2023 20:04:04 +0200 Subject: [PATCH 015/235] added function to validate ProductTree --- csaf/advisory.go | 82 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) diff --git a/csaf/advisory.go b/csaf/advisory.go index d00f997..f119c24 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -863,6 +863,81 @@ func (adv *Advisory) ValidateDocument() error { return nil } +func ValidateBranch(branches []*Branch) error { + for _, branch := range branches { + if branch.Category == nil { + return fmt.Errorf("element of property 'branches' is missing the property 'category'") + } + + if branch.Name == nil { + return fmt.Errorf("element of property 'branches' is missing the property 'name'") + } + + if branch.Product != nil { + if branch.Product.Name == nil { + return fmt.Errorf("property 'product' is missing the property 'name'") + } + + if branch.Product.ProductID == nil { + return fmt.Errorf("property 'product' is missing the property 'product_id'") + } + + if branch.Product.ProductIdentificationHelper != nil { + helper := branch.Product.ProductIdentificationHelper + + if helper.Hashes != nil { + if helper.Hashes.FileHashes == nil { + return fmt.Errorf("property 'hashes' is missing the property 'file_hashes'") + } + + for _, hash := range helper.Hashes.FileHashes { + if hash.Algorithm == nil { + return fmt.Errorf("element of property 'file_hashes' is missing the property 'algorithm'") + } + + if hash.Value == nil { + return fmt.Errorf("element of property 'file_hashes' is missing the property 'value'") + } + } + + if helper.Hashes.FileName == nil { + return fmt.Errorf("property 'hashes' is missing the property 'filename'") + } + } + + if helper.XGenericURIs != nil { + for _, uri := range helper.XGenericURIs { + if uri.Namespace == nil { + return fmt.Errorf("element of property 'x_generic_uris' is missing the property 'namespace'") + } + + if uri.URI == nil { + return fmt.Errorf("element of property 'x_generic_uris' is missing the property 'uri'") + } + } + } + } + } + + if branch.Branches != nil { + if validationError := ValidateBranch(branch.Branches); validationError != nil { + return validationError + 
} + } + } + return nil +} + +func (adv *Advisory) ValidateProductTree() error { + tree := adv.ProductTree + if tree.Branches != nil { + if validationError := ValidateBranch(tree.Branches); validationError != nil { + return validationError + } + } + return nil +} + // Validate checks if the advisory is valid. // Returns an error if the validation fails otherwise nil. func (adv *Advisory) Validate() error { @@ -873,6 +948,13 @@ func (adv *Advisory) Validate() error { if validationError := adv.ValidateDocument(); validationError != nil { return validationError } + + if adv.ProductTree != nil { + if validationError := adv.ValidateProductTree(); validationError != nil { + return validationError + } + } + return nil } From 4da9f67e2e9e32c4b285e9af9098372cda8fc158 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Sat, 9 Sep 2023 21:15:25 +0200 Subject: [PATCH 016/235] Distribute the validation to the types to reduce the overall complexity. --- csaf/advisory.go | 505 +++++++++++++++++++++++++++++------------------ 1 file changed, 309 insertions(+), 196 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index f119c24..c4966bf 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -10,6 +10,7 @@ package csaf import ( "encoding/json" + "errors" "fmt" "io" "os" @@ -110,17 +111,20 @@ type XGenericURI struct { URI *string `json:"uri"` // required } +// XGenericURIs is a list of XGenericURI. +type XGenericURIs []*XGenericURI + // ProductIdentificationHelper bundles product identifier information. // Supported formats for SBOMs are SPDX, CycloneDX, and SWID type ProductIdentificationHelper struct { - CPE *CPE `json:"cpe,omitempty"` - Hashes *Hashes `json:"hashes,omitempty"` - ModelNumbers []*string `json:"model_numbers,omitempty"` // unique elements - PURL *PURL `json:"purl,omitempty"` - SBOMURLs []*string `json:"sbom_urls,omitempty"` - SerialNumbers []*string `json:"serial_numbers,omitempty"` // unique elements - SKUs []*string `json:"skus,omitempty"` - XGenericURIs []*XGenericURI `json:"x_generic_uris,omitempty"` + CPE *CPE `json:"cpe,omitempty"` + Hashes *Hashes `json:"hashes,omitempty"` + ModelNumbers []*string `json:"model_numbers,omitempty"` // unique elements + PURL *PURL `json:"purl,omitempty"` + SBOMURLs []*string `json:"sbom_urls,omitempty"` + SerialNumbers []*string `json:"serial_numbers,omitempty"` // unique elements + SKUs []*string `json:"skus,omitempty"` + XGenericURIs XGenericURIs `json:"x_generic_uris,omitempty"` } // FullProductName is the full name of a product. @@ -137,7 +141,7 @@ type FullProductName struct { // If the category is 'product_version_range' the name MUST contain // version ranges. type Branch struct { - Branches []*Branch `json:"branches,omitempty"` + Branches Branches `json:"branches,omitempty"` Category *BranchCategory `json:"category"` // required Name *string `json:"name"` // required Product *FullProductName `json:"product,omitempty"` @@ -293,6 +297,9 @@ var csafTrackingStatusPattern = alternativesUnmarshal( string(CSAFTrackingStatusFinal), string(CSAFTrackingStatusInterim)) +// Revisions is a list of Revision. +type Revisions []*Revision + // Tracking holds information that is necessary to track a CSAF document. 
type Tracking struct { Aliases []*string `json:"aliases,omitempty"` // unique elements @@ -300,7 +307,7 @@ type Tracking struct { Generator *Generator `json:"generator"` ID *TrackingID `json:"id"` // required InitialReleaseDate *string `json:"initial_release_date"` // required - RevisionHistory []*Revision `json:"revision_history"` // required + RevisionHistory Revisions `json:"revision_history"` // required Status *TrackingStatus `json:"status"` // required Version *RevisionNumber `json:"version"` // required } @@ -318,9 +325,9 @@ type Document struct { CSAFVersion *Version `json:"csaf_version"` // required Distribution *DocumentDistribution `json:"distribution,omitempty"` Lang *Lang `json:"lang,omitempty"` - Notes []*Note `json:"notes,omitempty"` + Notes Notes `json:"notes,omitempty"` Publisher *DocumentPublisher `json:"publisher"` // required - References []*Reference `json:"references,omitempty"` + References References `json:"references,omitempty"` SourceLang *Lang `json:"source_lang,omitempty"` Title *string `json:"title"` // required Tracking *Tracking `json:"tracking"` // required @@ -373,9 +380,12 @@ type Relationship struct { } +// Branches is a list of Branch. +type Branches []*Branch + // ProductTree contains product names that can be referenced elsewhere in the document. type ProductTree struct { - Branches []*Branch `json:"branches,omitempty"` + Branches Branches `json:"branches,omitempty"` FullProductNames []*FullProductName `json:"full_product_name,omitempty"` ProductGroups *ProductGroups `json:"product_groups,omitempty"` RelationShips []*Relationship `json:"relationships,omitempty"` @@ -711,6 +721,12 @@ type Threat struct { ProductIds *Products `json:"product_ids,omitempty"` } +// Notes is a list of Note. +type Notes []*Note + +// References is a list of Reference. +type References []*Reference + // Vulnerability contains all fields that are related to a single vulnerability in the document. type Vulnerability struct { Acknowledgements []*Acknowledgement `json:"acknowledgements,omitempty"` @@ -720,9 +736,9 @@ type Vulnerability struct { Flags []*Flag `json:"flags,omitempty"` Ids []*VulnerabilityID `json:"ids,omitempty"` // unique ID elements Involvements []*Involvement `json:"involvements,omitempty"` - Notes []*Note `json:"notes,omitempty"` + Notes Notes `json:"notes,omitempty"` ProductStatus *ProductStatus `json:"product_status,omitempty"` - References []*Reference `json:"references,omitempty"` + References References `json:"references,omitempty"` ReleaseDate *string `json:"release_date,omitempty"` Remediations []*Remediation `json:"remediations,omitempty"` Scores []*Score `json:"scores,omitempty"` @@ -737,224 +753,321 @@ type Advisory struct { Vulnerabilities []*Vulnerability `json:"vulnerabilities,omitempty"` } -func (adv *Advisory) ValidateDocument() error { - doc := adv.Document +// Validate validates a AggregateSeverity. +func (as *AggregateSeverity) Validate() error { + if as.Text == nil { + return errors.New("'text' is missing") + } + return nil +} - if doc.AggregateSeverity != nil { - if doc.AggregateSeverity.Text == nil { - return fmt.Errorf("the property 'aggregate_severity' is missing the property 'text'") +// Validate validates a DocumentDistribution. +func (dd *DocumentDistribution) Validate() error { + if dd.Text == nil && dd.TLP == nil { + return errors.New("needs at least properties 'text' or 'tlp'") + } + return nil +} + +// Validate validates a list of notes. 
+func (ns Notes) Validate() error { + for i, n := range ns { + if err := n.Validate(); err != nil { + return fmt.Errorf("%d. note is invalid: %w", i+1, err) } } + return nil +} - if doc.Category == nil { - return fmt.Errorf("the property 'document' is missing the property 'category'") +// Validate validates a single note. +func (n *Note) Validate() error { + switch { + case n == nil: + return errors.New("is nil") + case n.NoteCategory == nil: + return errors.New("'note_category' is missing") + case n.Text == nil: + return errors.New("'text' is missing") + default: + return nil } +} - if doc.CSAFVersion == nil { - return fmt.Errorf("the property 'document' is missing the property 'csaf_version'") +// Validate validates a DocumentPublisher. +func (p *DocumentPublisher) Validate() error { + switch { + case p.Category == nil: + return errors.New("'document' is missing") + case p.Name == nil: + return errors.New("'name' is missing") + case p.Namespace == nil: + return errors.New("'namespace' is missing") + default: + return nil } +} +// Validate validates a single reference. +func (r *Reference) Validate() error { + switch { + case r.Summary == nil: + return errors.New("summary' is missing") + case r.URL == nil: + return errors.New("'url' is missing") + default: + return nil + } +} + +// Validate validates a list of references. +func (rs References) Validate() error { + for i, r := range rs { + if err := r.Validate(); err != nil { + return fmt.Errorf("%d. reference is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a single revision. +func (r *Revision) Validate() error { + switch { + case r.Date == nil: + return errors.New("'date' is missing") + case r.Number == nil: + return errors.New("'number' is missing") + case r.Summary == nil: + return errors.New("'summary' is missing") + default: + return nil + } +} + +// Validate validates a list of revisions. +func (rs Revisions) Validate() error { + for i, r := range rs { + if err := r.Validate(); err != nil { + return fmt.Errorf("%d. revision is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates an Engine. +func (e *Engine) Validate() error { + if e.Version == nil { + return errors.New("'version' is missing") + } + return nil +} + +// Validate validates a Generator. +func (g *Generator) Validate() error { + if g.Engine == nil { + return errors.New("'engine' is missing") + } + if err := g.Engine.Validate(); err != nil { + return fmt.Errorf("'engine' is invalid: %w", err) + } + return nil +} + +// Validate validates a single Tracking. +func (t *Tracking) Validate() error { + switch { + case t.CurrentReleaseDate == nil: + return errors.New("'current_release_date' is missing") + case t.ID == nil: + return errors.New("'id' is missing") + case t.InitialReleaseDate == nil: + return errors.New("'initial_release_date' is missing") + case t.RevisionHistory == nil: + return errors.New("'revision_history' is missing") + case t.Status == nil: + return errors.New("'status' is missing") + case t.Version == nil: + return errors.New("'version' is missing") + } + if err := t.RevisionHistory.Validate(); err != nil { + return fmt.Errorf("'revision_history' is invalid: %w", err) + } + if t.Generator != nil { + if err := t.Generator.Validate(); err != nil { + return fmt.Errorf("'generator' is invalid: %w", err) + } + } + return nil +} + +// Validate validates a Document. 
+func (doc *Document) Validate() error { + switch { + case doc.Category == nil: + return errors.New("'category' is missing") + case doc.CSAFVersion == nil: + return errors.New("'csaf_version' is missing") + case doc.Publisher == nil: + return errors.New("'publisher' is missing") + case doc.Title == nil: + return errors.New("'title' is missing") + case doc.Tracking == nil: + return errors.New("'tracking' is missing") + } + if err := doc.Tracking.Validate(); err != nil { + return fmt.Errorf("'tracking' is invalid: %w", err) + } if doc.Distribution != nil { - if doc.Distribution.Text == nil && doc.Distribution.TLP == nil { - return fmt.Errorf("the property 'distribution' must at least contain one of the following properties:" + - "'text', 'tlp'") + if err := doc.Distribution.Validate(); err != nil { + return fmt.Errorf("'distribution' is invalid: %w", err) } } - - if doc.Notes != nil { - for index, note := range doc.Notes { - if note.NoteCategory == nil { - return fmt.Errorf("the %d. note in the property 'document' is missing the property 'note_category'", index) - } - if note.Text == nil { - return fmt.Errorf("the %d. note in the property 'document' is missing the property 'text'", index) - } + if doc.AggregateSeverity != nil { + if err := doc.AggregateSeverity.Validate(); err != nil { + return fmt.Errorf("'aggregate_severity' is invalid: %w", err) } } - - if doc.Publisher == nil { - return fmt.Errorf("the property 'document' is missing the property 'publisher'") + if err := doc.Publisher.Validate(); err != nil { + return fmt.Errorf("'publisher' is invalid: %w", err) } - - publisher := doc.Publisher - - if publisher.Category == nil { - return fmt.Errorf("the publisher in the property 'document' is missing the property 'category'") + if err := doc.References.Validate(); err != nil { + return fmt.Errorf("'references' is invalid: %w", err) } - - if publisher.Name == nil { - return fmt.Errorf("the publisher in the property 'document' is missing the property 'name'") + if err := doc.Notes.Validate(); err != nil { + return fmt.Errorf("'notes' is invalid: %w", err) } - - if publisher.Namespace == nil { - return fmt.Errorf("the publisher in the property 'document' is missing the property 'namespace'") - } - - if doc.References != nil { - for index, ref := range doc.References { - if ref.Summary == nil { - return fmt.Errorf("the %d. reference in the property 'document' is missing the property 'summary'", index) - } - if ref.URL == nil { - return fmt.Errorf("the %d. 
reference in the property 'document' is missing the property 'url'", index) - } - } - } - - if doc.Title == nil { - return fmt.Errorf("the property 'document' is missing the property 'title'") - } - - if doc.Tracking == nil { - return fmt.Errorf("the property 'document' is missing the property 'tracking'") - } - - tracking := doc.Tracking - - if tracking.CurrentReleaseDate == nil { - return fmt.Errorf("the property 'tracking' is missing the property 'current_release_date'") - } - - if tracking.Generator != nil { - generator := tracking.Generator - if generator.Engine == nil { - return fmt.Errorf("the property 'generator' is missing the property 'engine'") - } - - if generator.Engine.Version == nil { - return fmt.Errorf("the property 'engine' is missing the property 'version'") - } - } - - if tracking.ID == nil { - return fmt.Errorf("the property 'tracking' is missing the property 'id'") - } - - if tracking.InitialReleaseDate == nil { - return fmt.Errorf("the property 'tracking' is missing the property 'initial_release_date'") - } - - if tracking.RevisionHistory == nil { - return fmt.Errorf("the property 'tracking' is missing the property 'revision_history'") - } - - for index, revision := range tracking.RevisionHistory { - if revision.Date == nil { - return fmt.Errorf("the %d. revision in the property 'document' is missing the property 'date'", index) - } - - if revision.Number == nil { - return fmt.Errorf("the %d. revision in the property 'document' is missing the property 'number'", index) - } - - if revision.Summary == nil { - return fmt.Errorf("the %d. revision in the property 'document' is missing the property 'summary'", index) - } - } - - if tracking.Status == nil { - return fmt.Errorf("the property 'tracking' is missing the property 'status'") - } - - if tracking.Version == nil { - return fmt.Errorf("the property 'tracking' is missing the property 'version'") - } - return nil } -func ValidateBranch(branches []*Branch) error { - for _, branch := range branches { - if branch.Category == nil { - return fmt.Errorf("element of property 'branches' is missing the property 'category'") - } +// Validate validates a single FileHash. 
+func (fh *FileHash) Validate() error { + switch { + case fh == nil: + return errors.New("is nil") + case fh.Algorithm == nil: + return errors.New("'algorithm' is missing") + case fh.Value == nil: + return errors.New("'value' is missing") + default: + return nil + } +} - if branch.Name == nil { - return fmt.Errorf("element of property 'branches' is missing the property 'name'") - } - - if branch.Product != nil { - if branch.Product.Name == nil { - return fmt.Errorf("property 'product' is missing the property 'name'") - } - - if branch.Product.ProductID == nil { - return fmt.Errorf("property 'product' is missing the property 'product_id'") - } - - if branch.Product.ProductIdentificationHelper != nil { - helper := branch.Product.ProductIdentificationHelper - - if helper.Hashes != nil { - if helper.Hashes.FileHashes == nil { - return fmt.Errorf("property 'hashes' is missing the property 'file_hashes'") - } - - for _, hash := range helper.Hashes.FileHashes { - if hash.Algorithm == nil { - return fmt.Errorf("element of property 'file_hashes' is missing the property 'algorithm'") - } - - if hash.Value == nil { - return fmt.Errorf("element of property 'file_hashes' is missing the property 'value'") - } - } - - if helper.Hashes.FileName == nil { - return fmt.Errorf("property 'hashes' is missing the property 'filename'") - } - } - - if helper.XGenericURIs != nil { - for _, uri := range helper.XGenericURIs { - if uri.Namespace == nil { - return fmt.Errorf("element of property 'x_generic_uris' is missing the property 'namespace'") - } - - if uri.URI == nil { - return fmt.Errorf("element of property 'x_generic_uris' is missing the property 'uri'") - } - } - } - } - } - - if branch.Branches != nil { - if validationError := ValidateBranch(branch.Branches); validationError != nil { - return validationError - } +// Validate validates a list of file hashes. +func (hs *Hashes) Validate() error { + switch { + case hs.FileHashes == nil: + return errors.New("'hashes' is missing") + case hs.FileName == nil: + return errors.New("'filename' is missing") + } + for i, fh := range hs.FileHashes { + if err := fh.Validate(); err != nil { + return fmt.Errorf("%d. file hash is invalid: %w", i+1, err) } } return nil } -func (adv *Advisory) ValidateProductTree() error { - tree := adv.ProductTree - if tree.Branches != nil { - if validationError := ValidateBranch(tree.Branches); validationError != nil { - return validationError +// Validate validates a single XGenericURI. +func (xgu *XGenericURI) Validate() error { + switch { + case xgu == nil: + return errors.New("is nil") + case xgu.Namespace == nil: + return errors.New("'namespace' is missing") + case xgu.URI == nil: + return errors.New("'uri' is missing") + default: + return nil + } +} + +// Validate validates a list of XGenericURIs. +func (xgus XGenericURIs) Validate() error { + for i, xgu := range xgus { + if err := xgu.Validate(); err != nil { + return fmt.Errorf("%d. generic uri is invalid: %w", i+1, err) } } return nil } +// Validate validates a ProductIdentificationHelper. +func (pih *ProductIdentificationHelper) Validate() error { + if pih.Hashes != nil { + if err := pih.Hashes.Validate(); err != nil { + return fmt.Errorf("'hashes' is invalid: %w", err) + } + } + if pih.XGenericURIs != nil { + if err := pih.XGenericURIs.Validate(); err != nil { + return fmt.Errorf("'x_generic_uris' is invalid: %w", err) + } + } + return nil +} + +// Validate validates a FullProductName. 
+func (fpn *FullProductName) Validate() error { + switch { + case fpn.Name == nil: + return errors.New("'name' is missing") + case fpn.ProductID == nil: + return errors.New("'product_id' is missing") + } + if fpn.ProductIdentificationHelper != nil { + if err := fpn.ProductIdentificationHelper.Validate(); err != nil { + return fmt.Errorf("'product_identification_helper' is invalid: %w", err) + } + } + return nil +} + +// Validate validates a single Branch. +func (b *Branch) Validate() error { + switch { + case b.Category == nil: + return errors.New("'category' is missing") + case b.Name == nil: + return errors.New("'name' is missing") + } + if b.Product != nil { + if err := b.Product.Validate(); err != nil { + return fmt.Errorf("'product' is invalid: %w", err) + } + } + return b.Branches.Validate() +} + +// Validate validates a list of branches. +func (bs Branches) Validate() error { + for i, b := range bs { + if err := b.Validate(); err != nil { + return fmt.Errorf("%d. branch is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a ProductTree. +func (pt *ProductTree) Validate() error { + return pt.Branches.Validate() +} + // Validate checks if the advisory is valid. // Returns an error if the validation fails otherwise nil. func (adv *Advisory) Validate() error { if adv.Document == nil { - return fmt.Errorf("the advisory is missing the property 'document'") + return errors.New("'document' is missing") } - - if validationError := adv.ValidateDocument(); validationError != nil { - return validationError + if err := adv.Document.Validate(); err != nil { + return fmt.Errorf("'document' is invalid: %w", err) } - if adv.ProductTree != nil { - if validationError := adv.ValidateProductTree(); validationError != nil { - return validationError + if err := adv.ProductTree.Validate(); err != nil { + return fmt.Errorf("'product_tree' is invalid: %w", err) } } - return nil } @@ -969,8 +1082,8 @@ func LoadAdvisory(fname string) (*Advisory, error) { if err := json.NewDecoder(f).Decode(&advisory); err != nil { return nil, err } - if validationError := advisory.Validate(); validationError != nil { - return nil, validationError + if err := advisory.Validate(); err != nil { + return nil, err } return &advisory, nil } From bdd7f24b317b8798352f1118c35dbb104d66a3c4 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Mon, 11 Sep 2023 21:31:45 +0200 Subject: [PATCH 017/235] fix: product tree properties --- csaf/advisory.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index f119c24..b334a31 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -376,8 +376,8 @@ type Relationship struct { // ProductTree contains product names that can be referenced elsewhere in the document. 
type ProductTree struct { Branches []*Branch `json:"branches,omitempty"` - FullProductNames []*FullProductName `json:"full_product_name,omitempty"` - ProductGroups *ProductGroups `json:"product_groups,omitempty"` + FullProductNames []*FullProductName `json:"full_product_names,omitempty"` + ProductGroups []*ProductGroup `json:"product_groups,omitempty"` RelationShips []*Relationship `json:"relationships,omitempty"` } From b5db976f053a8414e2302fad1eee4810d7d6e5f8 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Tue, 12 Sep 2023 16:21:00 +0200 Subject: [PATCH 018/235] completed validation of product tree --- csaf/advisory.go | 72 ++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 66 insertions(+), 6 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 14ed3fa..f98c2c0 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -134,6 +134,9 @@ type FullProductName struct { ProductIdentificationHelper *ProductIdentificationHelper `json:"product_identification_helper,omitempty"` } +// FullProductNames is a list of FullProductName. +type FullProductNames []FullProductName + // Branch reflects the 'branch' object in the list of branches. // It may contain either the property Branches OR Product. // If the category is 'product_version' the name MUST NOT contain @@ -380,15 +383,18 @@ type Relationship struct { } +// Relationships is a list of Relationship. +type Relationships []Relationship + // Branches is a list of Branch. type Branches []*Branch // ProductTree contains product names that can be referenced elsewhere in the document. type ProductTree struct { - Branches Branches `json:"branches,omitempty"` - FullProductNames []*FullProductName `json:"full_product_names,omitempty"` - ProductGroups *ProductGroups `json:"product_groups,omitempty"` - RelationShips []*Relationship `json:"relationships,omitempty"` + Branches Branches `json:"branches,omitempty"` + FullProductNames *FullProductNames `json:"full_product_names,omitempty"` + ProductGroups *ProductGroups `json:"product_groups,omitempty"` + RelationShips *Relationships `json:"relationships,omitempty"` } // CVE holds the MITRE standard Common Vulnerabilities and Exposures (CVE) tracking number for a vulnerability. @@ -746,11 +752,14 @@ type Vulnerability struct { Title *string `json:"title,omitempty"` } +// Vulnerabilities is a list of Vulnerability +type Vulnerabilities []*Vulnerability + // Advisory represents a CSAF advisory. type Advisory struct { Document *Document `json:"document"` // required ProductTree *ProductTree `json:"product_tree,omitempty"` - Vulnerabilities []*Vulnerability `json:"vulnerabilities,omitempty"` + Vulnerabilities *Vulnerabilities `json:"vulnerabilities,omitempty"` } // Validate validates a AggregateSeverity. @@ -1023,6 +1032,16 @@ func (fpn *FullProductName) Validate() error { return nil } +// Validate validates a list of Relationship elements. +func (fpns FullProductNames) Validate() error { + for i, f := range fpns { + if err := f.Validate(); err != nil { + return fmt.Errorf("%d. full product name is invalid: %w", i+1, err) + } + } + return nil +} + // Validate validates a single Branch. func (b *Branch) Validate() error { switch { @@ -1039,6 +1058,24 @@ func (b *Branch) Validate() error { return b.Branches.Validate() } +// Validate validates a single Relationship. 
+func (r *Relationship) Validate() error { + switch { + case r.Category == nil: + return errors.New("'category' is missing") + case r.ProductReference == nil: + return errors.New("'product_reference' is missing") + case r.RelatesToProductReference == nil: + return errors.New("'relates_to_product_reference' is missing") + } + if r.FullProductName != nil { + if err := r.FullProductName.Validate(); err != nil { + return fmt.Errorf("'product' is invalid: %w", err) + } + } + return nil +} + // Validate validates a list of branches. func (bs Branches) Validate() error { for i, b := range bs { @@ -1049,9 +1086,32 @@ func (bs Branches) Validate() error { return nil } +// Validate validates a list of Relationship elements. +func (rs Relationships) Validate() error { + for i, r := range rs { + if err := r.Validate(); err != nil { + return fmt.Errorf("%d. relationship is invalid: %w", i+1, err) + } + } + return nil +} + // Validate validates a ProductTree. func (pt *ProductTree) Validate() error { - return pt.Branches.Validate() + if err := pt.Branches.Validate(); err != nil { + return fmt.Errorf("'branches' is invalid: %w", err) + } + if pt.FullProductNames != nil { + if err := pt.FullProductNames.Validate(); err != nil { + return fmt.Errorf("'full_product_names is invalid: %w", err) + } + } + if pt.RelationShips != nil { + if err := pt.RelationShips.Validate(); err != nil { + return fmt.Errorf("'relationships' is invalid: %w", err) + } + } + return nil } // Validate checks if the advisory is valid. From c8f1361c52e769fdc23f9b426583be71de306e50 Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Tue, 12 Sep 2023 17:26:28 +0200 Subject: [PATCH 019/235] added validation for vulnerabilites --- csaf/advisory.go | 292 +++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 271 insertions(+), 21 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index f98c2c0..9ee219a 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -25,6 +25,9 @@ type Acknowledgement struct { URLs []*string `json:"urls,omitempty"` } +// Acknowledgements is a list of Acknowledgement elements. +type Acknowledgements []*Acknowledgement + // BranchCategory is the category of a branch. type BranchCategory string @@ -75,7 +78,7 @@ var csafBranchCategoryPattern = alternativesUnmarshal( type ProductID string // Products is a list of one or more unique ProductID elements. -type Products []ProductID +type Products []*ProductID // FileHashValue represents the value of a hash. type FileHashValue string @@ -135,7 +138,7 @@ type FullProductName struct { } // FullProductNames is a list of FullProductName. -type FullProductNames []FullProductName +type FullProductNames []*FullProductName // Branch reflects the 'branch' object in the list of branches. // It may contain either the property Branches OR Product. @@ -322,7 +325,7 @@ var langPattern = patternUnmarshal("^(([A-Za-z]{2,3}(-[A-Za-z]{3}(-[A-Za-z]{3}){ // Document contains meta-data about an advisory. 
type Document struct { - Acknowledgements []*Acknowledgement `json:"acknowledgements,omitempty"` + Acknowledgements *Acknowledgements `json:"acknowledgements,omitempty"` AggregateSeverity *AggregateSeverity `json:"aggregate_severity,omitempty"` Category *DocumentCategory `json:"category"` // required CSAFVersion *Version `json:"csaf_version"` // required @@ -348,7 +351,7 @@ type ProductGroup struct { // ProductGroups is a list of ProductGroupIDs type ProductGroups struct { - ProductGroupIDs []ProductGroupID `json:"product_group_ids"` // unique elements + ProductGroupIDs []*ProductGroupID `json:"product_group_ids"` // unique elements } // RelationshipCategory is the category of a relationship. @@ -384,7 +387,7 @@ type Relationship struct { } // Relationships is a list of Relationship. -type Relationships []Relationship +type Relationships []*Relationship // Branches is a list of Branch. type Branches []*Branch @@ -441,17 +444,23 @@ var csafFlagLabelPattern = alternativesUnmarshal( // code why a product is not affected. type Flag struct { Date *string `json:"date,omitempty"` - GroupIds *ProductGroups `json:"group_ids,omitempty"` + GroupIDs *ProductGroups `json:"group_ids,omitempty"` Label *FlagLabel `json:"label"` // required ProductIds *Products `json:"product_ids,omitempty"` } +// Flags is a list if Flag elements. +type Flags []*Flag + // VulnerabilityID is the identifier of a vulnerability. type VulnerabilityID struct { SystemName *string `json:"system_name"` // required Text *string `json:"text"` // required } +// VulneratilityIDs is a list of VulnerabilityID elements. +type VulnerabilityIDs []*VulnerabilityID + // InvolvementParty is the party of an involvement. type InvolvementParty string @@ -513,6 +522,9 @@ type Involvement struct { Summary *string `json:"summary,omitempty"` } +// Involvements is a list of Involvement elements. +type Involvements []*Involvement + // ProductStatus contains different lists of ProductIDs which provide details on // the status of the referenced product related to the current vulnerability. type ProductStatus struct { @@ -603,6 +615,9 @@ type Remediation struct { URL *string `json:"url,omitempty"` } +// Remediations is a list of Remediation elements. +type Remediations []*Remediation + // CVSSVersion2 is the version of a CVSS2 item. type CVSSVersion2 string @@ -701,6 +716,9 @@ type Score struct { Products *Products `json:"products"` // required } +// Scores is a list of Score elements. +type Scores []*Score + // ThreatCategory is the category of a threat. type ThreatCategory string @@ -727,6 +745,9 @@ type Threat struct { ProductIds *Products `json:"product_ids,omitempty"` } +// Threats is a list of Threat elements. +type Threats []*Threat + // Notes is a list of Note. type Notes []*Note @@ -735,21 +756,21 @@ type References []*Reference // Vulnerability contains all fields that are related to a single vulnerability in the document. 
type Vulnerability struct { - Acknowledgements []*Acknowledgement `json:"acknowledgements,omitempty"` - CVE *CVE `json:"cve,omitempty"` - CWE *CWE `json:"cwe,omitempty"` - DiscoveryDate *string `json:"discovery_date,omitempty"` - Flags []*Flag `json:"flags,omitempty"` - Ids []*VulnerabilityID `json:"ids,omitempty"` // unique ID elements - Involvements []*Involvement `json:"involvements,omitempty"` - Notes Notes `json:"notes,omitempty"` - ProductStatus *ProductStatus `json:"product_status,omitempty"` - References References `json:"references,omitempty"` - ReleaseDate *string `json:"release_date,omitempty"` - Remediations []*Remediation `json:"remediations,omitempty"` - Scores []*Score `json:"scores,omitempty"` - Threats []*Threat `json:"threats,omitempty"` - Title *string `json:"title,omitempty"` + Acknowledgements *Acknowledgements `json:"acknowledgements,omitempty"` + CVE *CVE `json:"cve,omitempty"` + CWE *CWE `json:"cwe,omitempty"` + DiscoveryDate *string `json:"discovery_date,omitempty"` + Flags *Flags `json:"flags,omitempty"` + IDs *VulnerabilityIDs `json:"ids,omitempty"` // unique ID elements + Involvements *Involvements `json:"involvements,omitempty"` + Notes Notes `json:"notes,omitempty"` + ProductStatus *ProductStatus `json:"product_status,omitempty"` + References References `json:"references,omitempty"` + ReleaseDate *string `json:"release_date,omitempty"` + Remediations *Remediations `json:"remediations,omitempty"` + Scores *Scores `json:"scores,omitempty"` + Threats *Threats `json:"threats,omitempty"` + Title *string `json:"title,omitempty"` } // Vulnerabilities is a list of Vulnerability @@ -1114,6 +1135,232 @@ func (pt *ProductTree) Validate() error { return nil } +// Validate validates a single Flag. +func (f *Flag) Validate() error { + if f.Label == nil { + return errors.New("'label' is missing") + } + return nil +} + +// Validate validates a list of Flag elements. +func (fs Flags) Validate() error { + for i, f := range fs { + if err := f.Validate(); err != nil { + return fmt.Errorf("%d. flag is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a CWE. +func (cwe *CWE) Validate() error { + switch { + case cwe.ID == nil: + return errors.New("'id' is missing") + case cwe.Name == nil: + return errors.New("'name' is missing") + } + return nil +} + +// Validate validates a single VulnerabilityID. +func (id *VulnerabilityID) Validate() error { + switch { + case id.SystemName == nil: + return errors.New("'system_name' is missing") + case id.Text == nil: + return errors.New("'text' is missing") + } + return nil +} + +// Validate validates a list of VulnerabilityID elements. +func (ids VulnerabilityIDs) Validate() error { + for i, id := range ids { + if err := id.Validate(); err != nil { + return fmt.Errorf("%d. vulnerability id is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a single Involvement. +func (iv *Involvement) Validate() error { + switch { + case iv.Party == nil: + return errors.New("'party' is missing") + case iv.Status == nil: + return errors.New("'status' is missing") + } + return nil +} + +// Validate validates a list of Involvement elements. +func (ivs Involvements) Validate() error { + for i, iv := range ivs { + if err := iv.Validate(); err != nil { + return fmt.Errorf("%d. involvement is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a RestartRequired. 
+func (rr *RestartRequired) Validate() error { + if rr.Category == nil { + return errors.New("'category' is missing") + } + return nil +} + +// Validate validates a CVSS2 +func (c *CVSS2) Validate() error { + switch { + case c.Version == nil: + return errors.New("'version' is missing") + case c.VectorString == nil: + return errors.New("'vectorString' is missing") + case c.BaseScore == nil: + return errors.New("'baseScore' is missing") + } + return nil +} + +// Validate validates a CVSS3 +func (c *CVSS3) Validate() error { + switch { + case c.Version == nil: + return errors.New("'version' is missing") + case c.VectorString == nil: + return errors.New("'vectorString' is missing") + case c.BaseScore == nil: + return errors.New("'baseScore' is missing") + case c.BaseSeverity == nil: + return errors.New("'baseSeverity' is missing") + } + return nil +} + +// Validate validates a single Score. +func (s *Score) Validate() error { + if s.Products == nil { + return errors.New("'products' is missing") + } + if s.CVSS2 != nil { + if err := s.CVSS2.Validate(); err != nil { + return fmt.Errorf("'cvss_v2' is invalid: %w", err) + } + } + if s.CVSS3 != nil { + if err := s.CVSS3.Validate(); err != nil { + return fmt.Errorf("'cvss_v3' is invalid: %w", err) + } + } + return nil +} + +// Validate validates a list of Score elements. +func (ss Scores) Validate() error { + for i, s := range ss { + if err := s.Validate(); err != nil { + return fmt.Errorf("%d. score is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a single Remediation. +func (r *Remediation) Validate() error { + switch { + case r.Category == nil: + return errors.New("'category' is missing") + case r.Details == nil: + return errors.New("'details' is missing") + } + if r.RestartRequired != nil { + if err := r.RestartRequired.Validate(); err != nil { + return fmt.Errorf("'restart_required' is invalid: %w", err) + } + } + return nil +} + +// Validate validates a list of Remediation elements. +func (rms Remediations) Validate() error { + for i, r := range rms { + if err := r.Validate(); err != nil { + return fmt.Errorf("%d. remediation is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a single Threat. +func (t *Threat) Validate() error { + switch { + case t.Category == nil: + return errors.New("'category' is missing") + case t.Details == nil: + return errors.New("'details' is missing") + } + return nil +} + +// Validate validates a list of Threat elements. +func (ts Threats) Validate() error { + for i, t := range ts { + if err := t.Validate(); err != nil { + return fmt.Errorf("%d. threat is invalid: %w", i+1, err) + } + } + return nil +} + +// Validate validates a single Vulnerability. 
+func (v *Vulnerability) Validate() error { + if v.CWE != nil { + if err := v.CWE.Validate(); err != nil { + return fmt.Errorf("'cwe' is invalid: %w", err) + } + } + if err := v.Flags.Validate(); err != nil { + return fmt.Errorf("'flags' is invalid: %w", err) + } + if err := v.IDs.Validate(); err != nil { + return fmt.Errorf("'ids' is invalid: %w", err) + } + if err := v.Involvements.Validate(); err != nil { + return fmt.Errorf("'involvements' is invalid: %w", err) + } + if err := v.Notes.Validate(); err != nil { + return fmt.Errorf("'notes' is invalid: %w", err) + } + if err := v.References.Validate(); err != nil { + return fmt.Errorf("'references' is invalid: %w", err) + } + if err := v.Remediations.Validate(); err != nil { + return fmt.Errorf("'remediations' is invalid: %w", err) + } + if err := v.Scores.Validate(); err != nil { + return fmt.Errorf("'scores' is invalid: %w", err) + } + if err := v.Threats.Validate(); err != nil { + return fmt.Errorf("'threats' is invalid: %w", err) + } + return nil +} + +// Validate validates a list of Vulnerability elements. +func (vs Vulnerabilities) Validate() error { + for i, v := range vs { + if err := v.Validate(); err != nil { + return fmt.Errorf("%d. vulnerability is invalid: %w", i+1, err) + } + } + return nil +} + // Validate checks if the advisory is valid. // Returns an error if the validation fails otherwise nil. func (adv *Advisory) Validate() error { @@ -1128,6 +1375,9 @@ func (adv *Advisory) Validate() error { return fmt.Errorf("'product_tree' is invalid: %w", err) } } + if err := adv.Vulnerabilities.Validate(); err != nil { + return fmt.Errorf("'vulnerabilities' is invalid: %w", err) + } return nil } From 37cdda7c42cdbe245d784819f37677b71b47a3bb Mon Sep 17 00:00:00 2001 From: Christoph Klassen Date: Wed, 13 Sep 2023 08:01:00 +0200 Subject: [PATCH 020/235] dont use pointer for lists of elements --- csaf/advisory.go | 42 ++++++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 9ee219a..5955b10 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -756,21 +756,21 @@ type References []*Reference // Vulnerability contains all fields that are related to a single vulnerability in the document. 
type Vulnerability struct { - Acknowledgements *Acknowledgements `json:"acknowledgements,omitempty"` - CVE *CVE `json:"cve,omitempty"` - CWE *CWE `json:"cwe,omitempty"` - DiscoveryDate *string `json:"discovery_date,omitempty"` - Flags *Flags `json:"flags,omitempty"` - IDs *VulnerabilityIDs `json:"ids,omitempty"` // unique ID elements - Involvements *Involvements `json:"involvements,omitempty"` - Notes Notes `json:"notes,omitempty"` - ProductStatus *ProductStatus `json:"product_status,omitempty"` - References References `json:"references,omitempty"` - ReleaseDate *string `json:"release_date,omitempty"` - Remediations *Remediations `json:"remediations,omitempty"` - Scores *Scores `json:"scores,omitempty"` - Threats *Threats `json:"threats,omitempty"` - Title *string `json:"title,omitempty"` + Acknowledgements Acknowledgements `json:"acknowledgements,omitempty"` + CVE *CVE `json:"cve,omitempty"` + CWE *CWE `json:"cwe,omitempty"` + DiscoveryDate *string `json:"discovery_date,omitempty"` + Flags Flags `json:"flags,omitempty"` + IDs VulnerabilityIDs `json:"ids,omitempty"` // unique ID elements + Involvements Involvements `json:"involvements,omitempty"` + Notes Notes `json:"notes,omitempty"` + ProductStatus *ProductStatus `json:"product_status,omitempty"` + References References `json:"references,omitempty"` + ReleaseDate *string `json:"release_date,omitempty"` + Remediations Remediations `json:"remediations,omitempty"` + Scores Scores `json:"scores,omitempty"` + Threats Threats `json:"threats,omitempty"` + Title *string `json:"title,omitempty"` } // Vulnerabilities is a list of Vulnerability @@ -778,9 +778,9 @@ type Vulnerabilities []*Vulnerability // Advisory represents a CSAF advisory. type Advisory struct { - Document *Document `json:"document"` // required - ProductTree *ProductTree `json:"product_tree,omitempty"` - Vulnerabilities *Vulnerabilities `json:"vulnerabilities,omitempty"` + Document *Document `json:"document"` // required + ProductTree *ProductTree `json:"product_tree,omitempty"` + Vulnerabilities Vulnerabilities `json:"vulnerabilities,omitempty"` } // Validate validates a AggregateSeverity. @@ -1375,8 +1375,10 @@ func (adv *Advisory) Validate() error { return fmt.Errorf("'product_tree' is invalid: %w", err) } } - if err := adv.Vulnerabilities.Validate(); err != nil { - return fmt.Errorf("'vulnerabilities' is invalid: %w", err) + if adv.Vulnerabilities != nil { + if err := adv.Vulnerabilities.Validate(); err != nil { + return fmt.Errorf("'vulnerabilities' is invalid: %w", err) + } } return nil } From 20b2bd27b35496f48296f8f47024ee90acb1bb36 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 13 Sep 2023 14:35:22 +0200 Subject: [PATCH 021/235] Fix typo in comment. --- csaf/advisory.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 5955b10..95a6821 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -458,7 +458,7 @@ type VulnerabilityID struct { Text *string `json:"text"` // required } -// VulneratilityIDs is a list of VulnerabilityID elements. +// VulnerabilityIDs is a list of VulnerabilityID elements. type VulnerabilityIDs []*VulnerabilityID // InvolvementParty is the party of an involvement. 
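With the validation distributed across the types in the patches above, each `Validate` method reports its own missing properties, nested failures are wrapped with `%w`, the list types (`Notes`, `References`, `Revisions`, `Branches`, ...) prefix the 1-based index of the offending element, and `LoadAdvisory` rejects a document that fails validation right after decoding it. The sketch below is hypothetical and not part of the patch series; it assumes the `v3` module path used elsewhere in this series and only illustrates what a caller of `csaf.LoadAdvisory` would see.

```go
// Hypothetical throw-away program, not part of any patch in this series.
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/csaf-poc/csaf_distribution/v3/csaf"
)

func main() {
	if len(os.Args) != 2 {
		log.Fatalf("usage: %s advisory.json", os.Args[0])
	}
	// LoadAdvisory decodes the JSON document and, after the patches above,
	// also runs Advisory.Validate, so a missing required property surfaces
	// as a wrapped chain such as:
	//   'document' is invalid: 'tracking' is invalid: 'id' is missing
	adv, err := csaf.LoadAdvisory(os.Args[1])
	if err != nil {
		log.Fatalf("loading %q failed: %v", os.Args[1], err)
	}
	// 'title' is a required property, so it is non-nil after validation.
	fmt.Printf("loaded advisory %q\n", *adv.Document.Title)
}
```

Because the list validators include the element index in their messages, a failure like "'vulnerabilities' is invalid: 2. vulnerability is invalid: 'cwe' is invalid: 'id' is missing" can be traced back to the offending element from the error text alone.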
From d69101924b6e6bde7e44c2452df7c671caaf21be Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 6 Oct 2023 12:53:25 +0200 Subject: [PATCH 022/235] Add build for macOS --- Makefile | 16 ++++++++++------ README.md | 8 +++++--- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index 4fe3d97..154b24d 100644 --- a/Makefile +++ b/Makefile @@ -12,15 +12,15 @@ SHELL = /bin/bash BUILD = go build MKDIR = mkdir -p -.PHONY: build build_linux build_win tag_checked_out mostlyclean +.PHONY: build build_linux build_win build_mac_amd64 build_mac_arm64 tag_checked_out mostlyclean all: - @echo choose a target from: build build_linux build_win mostlyclean + @echo choose a target from: build build_linux build_win build_mac_amd64 build_mac_arm64 mostlyclean @echo prepend \`make BUILDTAG=1\` to checkout the highest git tag before building @echo or set BUILDTAG to a specific tag # Build all binaries -build: build_linux build_win +build: build_linux build_win build_mac_amd64 build_mac_arm64 # if BUILDTAG == 1 set it to the highest git tag ifeq ($(strip $(BUILDTAG)),1) @@ -29,7 +29,7 @@ endif ifdef BUILDTAG # add the git tag checkout to the requirements of our build targets -build_linux build_win: tag_checked_out +build_linux build_win build_mac_amd64 build_mac_arm64: tag_checked_out endif tag_checked_out: @@ -67,15 +67,19 @@ LDFLAGS = -ldflags "-X github.com/csaf-poc/csaf_distribution/v3/util.SemVersion= GOARCH = amd64 build_linux: GOOS = linux build_win: GOOS = windows +build_mac_amd64: GOOS = darwin -build_linux build_win: +build_mac_arm64: GOARCH = arm64 +build_mac_arm64: GOOS = darwin + +build_linux build_win build_mac_amd64 build_mac_arm64: $(eval BINDIR = bin-$(GOOS)-$(GOARCH)/ ) $(MKDIR) $(BINDIR) env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/... DISTDIR := csaf_distribution-$(SEMVER) -dist: build_linux build_win +dist: build_linux build_win build_mac_amd64 build_mac_arm64 mkdir -p dist mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 cp README.md dist/$(DISTDIR)-windows-amd64 diff --git a/README.md b/README.md index 6003f70..5c1a8a7 100644 --- a/README.md +++ b/README.md @@ -47,9 +47,11 @@ Download the binaries from the most recent release assets on Github. - Build Go components Makefile supplies the following targets: - Build For GNU/Linux System: `make build_linux` - - Build For Windows System (cross build): `make build_win` - - Build For both linux and windows: `make build` - - Build from a specific github tag by passing the intended tag to the `BUILDTAG` variable. + - Build For Windows System (cross build): `make build_win` + - Build For macOS on Intel Processor (AMD64) (cross build): `make build_mac_amd64` + - Build For macOS on Apple Silicon (ARM64) (cross build): `make build_mac_arm64` + - Build For Linux, Mac and Windows: `make build` + - Build from a specific GitHub tag by passing the intended tag to the `BUILDTAG` variable. E.g. `make BUILDTAG=v1.0.0 build` or `make BUILDTAG=1 build_linux`. The special value `1` means checking out the highest github tag for the build. 
- Remove the generated binaries und their directories: `make mostlyclean` From 5f2596665a46ed735d65e327b30933e907d76523 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 12 Oct 2023 12:07:40 +0200 Subject: [PATCH 023/235] Add new binary, the searcher(, the main and docs) from https://github.com/cintek/csaf_advisory_example+ --- cmd/csaf_searcher/main.go | 126 ++++++++++++++++++++++++++++++++++++++ docs/csaf_searcher.md | 16 +++++ 2 files changed, 142 insertions(+) create mode 100644 cmd/csaf_searcher/main.go create mode 100644 docs/csaf_searcher.md diff --git a/cmd/csaf_searcher/main.go b/cmd/csaf_searcher/main.go new file mode 100644 index 0000000..871433d --- /dev/null +++ b/cmd/csaf_searcher/main.go @@ -0,0 +1,126 @@ +// Package main implements a simple demo program to +// work with the csaf_distribution library. +package main + +import ( + "flag" + "fmt" + "log" + "os" + "slices" + "strings" + + "github.com/csaf-poc/csaf_distribution/v2/csaf" +) + +func main() { + flag.Usage = func() { + fmt.Fprintf(flag.CommandLine.Output(), + "Usage:\n %s [OPTIONS] files...\n\nOptions:\n", os.Args[0]) + flag.PrintDefaults() + } + idsString := flag.String("p", "", "ID1,ID2,...") + flag.Parse() + + files := flag.Args() + if len(files) == 0 { + log.Println("No files given.") + return + } + if err := run(files, *idsString); err != nil { + log.Fatalf("error: %v\n", err) + } +} + +// run prints PURLs belonging to the given Product IDs. +func run(files []string, ids string) error { + + uf := newURLFinder(strings.Split(ids, ",")) + + for _, file := range files { + adv, err := csaf.LoadAdvisory(file) + if err != nil { + return fmt.Errorf("loading %q failed: %w", file, err) + } + uf.findURLs(adv) + uf.dumpURLs() + uf.clear() + } + + return nil +} + +// urlFinder helps to find the URLs of a set of product ids in advisories. +type urlFinder struct { + ids []csaf.ProductID + urls [][]csaf.PURL +} + +// newURLFinder creates a new urlFinder for given ids. +func newURLFinder(ids []string) *urlFinder { + uf := &urlFinder{ + ids: make([]csaf.ProductID, len(ids)), + urls: make([][]csaf.PURL, len(ids)), + } + for i := range uf.ids { + uf.ids[i] = csaf.ProductID(ids[i]) + } + return uf +} + +// clear resets the url finder after a run on an advisory. +func (uf *urlFinder) clear() { + clear(uf.urls) +} + +// dumpURLs dumps the found URLs to stdout. +func (uf *urlFinder) dumpURLs() { + for i, urls := range uf.urls { + if len(urls) == 0 { + continue + } + fmt.Printf("Found URLs for %s:\n", uf.ids[i]) + for j, url := range urls { + fmt.Printf("%d. %s\n", j+1, url) + } + } +} + +// findURLs find the URLs in an advisory. +func (uf *urlFinder) findURLs(adv *csaf.Advisory) { + tree := adv.ProductTree + if tree == nil { + return + } + + // If we have found it and we have a valid URL add unique. + add := func(idx int, h *csaf.ProductIdentificationHelper) { + if idx != -1 && h != nil && h.PURL != nil && + !slices.Contains(uf.urls[idx], *h.PURL) { + uf.urls[idx] = append(uf.urls[idx], *h.PURL) + } + } + + // First iterate over full product names. + if names := tree.FullProductNames; names != nil { + for _, name := range *names { + if name != nil && name.ProductID != nil { + add(slices.Index(uf.ids, *name.ProductID), name.ProductIdentificationHelper) + } + } + } + + // Second traverse the branches recursively. 
+ var recBranch func(*csaf.Branch) + recBranch = func(b *csaf.Branch) { + if p := b.Product; p != nil && p.ProductID != nil { + add(slices.Index(uf.ids, *p.ProductID), p.ProductIdentificationHelper) + } + for _, c := range b.Branches { + recBranch(c) + } + } + for _, b := range tree.Branches { + recBranch(b) + } +} diff --git a/docs/csaf_searcher.md b/docs/csaf_searcher.md new file mode 100644 index 0000000..e821025 --- /dev/null +++ b/docs/csaf_searcher.md @@ -0,0 +1,16 @@ +# csaf_advisory_example + +This is a small searcher using the advisory model to search for PURLs belonging to a product ID in an advisory of the CSAF 2.0 standard. + +Usage: +``` + +csaf_advisory_example OPTIONS [files...] + +Application Options: +-p The Product ID + +Help Options: +-h, --help Show a help message + +``` From e354e4b20102f2538c004fb6b31d8f305ff110ae Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 13 Oct 2023 17:21:25 +0200 Subject: [PATCH 024/235] docs: add note about support level of MacOS builds --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 5c1a8a7..6377488 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,12 @@ They are likely to run on similar systems when build from sources. The windows binary package only includes `csaf_downloader`, `csaf_validator`, `csaf_checker` and `csaf_uploader`. +The MacOS binary archives come with the same set of client tools +and are _community supported_. Which means: +while they are expected to run fine, +they are not at the same level of testing and maintenance +as the Windows and GNU/Linux binaries. + ### Prebuild binaries From 1e506d46cc7e03d61d499762e9154b2215a29fe3 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 13 Oct 2023 17:52:14 +0200 Subject: [PATCH 025/235] feat: add macos binaries archive to Makefile --- Makefile | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Makefile b/Makefile index 154b24d..19e31c7 100644 --- a/Makefile +++ b/Makefile @@ -89,10 +89,19 @@ dist: build_linux build_win build_mac_amd64 build_mac_arm64 mkdir -p dist/$(DISTDIR)-windows-amd64/docs cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \ docs/csaf_downloader.md dist/$(DISTDIR)-windows-amd64/docs + mkdir -p dist/$(DISTDIR)-macos/bin-darwin-amd64 \ + dist/$(DISTDIR)-macos/bin-darwin-arm64 \ + dist/$(DISTDIR)-macos/docs + for f in csaf_downloader csaf_checker csaf_validator csaf_uploader ; do \ + cp bin-darwin-amd64/$$f dist/$(DISTDIR)-macos/bin-darwin-amd64 ; \ + cp bin-darwin-arm64/$$f dist/$(DISTDIR)-macos/bin-darwin-arm64 ; \ + cp docs/$${f}.md dist/$(DISTDIR)-macos/docs ; \ + done mkdir dist/$(DISTDIR)-gnulinux-amd64 cp -r README.md docs bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-amd64.tar.gz $(DISTDIR)-gnulinux-amd64/ + cd dist/ ; tar -cvmlzf $(DISTDIR)-macos.tar.gz $(DISTDIR)-macos # Remove bin-*-* and dist directories mostlyclean: From 3923dc7044df48f652288c97a374163a7f84dd25 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 17 Oct 2023 11:33:03 +0200 Subject: [PATCH 026/235] fix: improve logging for downloader and aggregator * use full name for printing out the used logfile for the downloader. * for debug or verbose, log the timeintervall that will be used for downloader and aggregator. (The checker has this as part of its output already.) 
--- cmd/csaf_aggregator/config.go | 12 ++++++++++-- cmd/csaf_downloader/config.go | 2 +- cmd/csaf_downloader/downloader.go | 2 ++ 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 34cfb81..596c056 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -12,6 +12,7 @@ import ( "crypto/tls" "errors" "fmt" + "log" "net/http" "os" "runtime" @@ -166,14 +167,21 @@ func (c *config) tooOldForInterims() func(time.Time) bool { // is in the accepted download interval of the provider or // the global config. func (p *provider) ageAccept(c *config) func(time.Time) bool { + var r *models.TimeRange switch { case p.Range != nil: - return p.Range.Contains + r = p.Range case c.Range != nil: - return c.Range.Contains + r = c.Range default: return nil } + + if c.Verbose { + s, _ := r.MarshalJSON() + log.Printf("Setting up filter to accept docs within TimeRange %s", s) + } + return r.Contains } // ignoreFile returns true if the given URL should not be downloaded. diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 17c8545..0c1ade1 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -214,7 +214,7 @@ func (cfg *config) prepareLogging() error { if err != nil { return err } - log.Printf("using %q for logging\n", *cfg.LogFile) + log.Printf("using %q for logging\n", fname) w = f } ho := slog.HandlerOptions{ diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 31c6286..5739577 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -204,6 +204,8 @@ func (d *downloader) download(ctx context.Context, domain string) error { // Do we need time range based filtering? if d.cfg.Range != nil { + slog.Debug("Setting up filter to accept documents within", + "TimeRange", d.cfg.Range) afp.AgeAccept = d.cfg.Range.Contains } From 8f6e6ee8bb4018956ce7288703cb55daf2b9ad67 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Tue, 17 Oct 2023 18:52:38 +0200 Subject: [PATCH 027/235] improve logging output --- cmd/csaf_aggregator/config.go | 5 +++-- cmd/csaf_downloader/downloader.go | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 596c056..ecc88dc 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -178,8 +178,9 @@ func (p *provider) ageAccept(c *config) func(time.Time) bool { } if c.Verbose { - s, _ := r.MarshalJSON() - log.Printf("Setting up filter to accept docs within TimeRange %s", s) + log.Printf( + "Setting up filter to accept advisories within time range %s to %s\n", + r[0].Format(time.RFC3339), r[1].Format(time.RFC3339)) } return r.Contains } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 5739577..d40f31b 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -204,8 +204,8 @@ func (d *downloader) download(ctx context.Context, domain string) error { // Do we need time range based filtering? if d.cfg.Range != nil { - slog.Debug("Setting up filter to accept documents within", - "TimeRange", d.cfg.Range) + slog.Debug("Setting up filter to accept advisories within", + "timerange", d.cfg.Range) afp.AgeAccept = d.cfg.Range.Contains } From abc8b109882528996c799ffe249200c6383c4466 Mon Sep 17 00:00:00 2001 From: "Bernhard E. 
Reiter" Date: Tue, 17 Oct 2023 18:53:53 +0200 Subject: [PATCH 028/235] docs: improve timerange documentation (#482) * docs: improve timerange documentation * add a documentation section to the downloader docs for the timerange-option. * point aggregator and checker docs to the downloader section for timerange. * docs: use a better example for timerange minutes --- docs/csaf_aggregator.md | 2 +- docs/csaf_checker.md | 31 +++----------------------- docs/csaf_downloader.md | 49 +++++++++++++++++++++++------------------ 3 files changed, 32 insertions(+), 50 deletions(-) diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 08cbae0..9769b27 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -109,7 +109,7 @@ client_cert // path to client certificate to access access-protected client_key // path to client key to access access-protected advisories client_passphrase // optional client cert passphrase (limited, experimental, see downloader doc) header // adds extra HTTP header fields to the client -timerange // Accepted time range of advisories to handle. See checker doc for details. +timerange // Accepted time range of advisories to handle. See downloader docs for details. ``` Next we have two TOML _tables_: diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 9541b5f..1db8292 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -69,35 +69,10 @@ type 2: error The checker result is a success if no checks resulted in type 2, and a failure otherwise. -The option `timerange` allows to only check advisories from a given time interval. -It is only allowed to specify one off them. -There are following variants: +The option `timerange` allows to only check advisories from a given time +interval. It can only be given once. See the +[downloader documentation](csaf_downloader.md#timerange-option) for details. -1. Relative. If the given string follows the rules of being a [Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration) - the time interval from now minus that duration till now is used. - E.g. `"3h"` means checking the advisories that have changed in the last three hours. - -2. Absolute. If the given string is an RFC 3339 date timestamp the time interval between - this date and now is used. - E.g. `"2006-01-02"` means that all files between 2006 January 2nd and now going to be - checked. - Accepted patterns are: - - `"2006-01-02T15:04:05Z"` - - `"2006-01-02T15:04:05+07:00"` - - `"2006-01-02T15:04:05-07:00"` - - `"2006-01-02T15:04:05"` - - `"2006-01-02T15:04"` - - `"2006-01-02T15"` - - `"2006-01-02"` - - `"2006-01"` - - `"2006"` - - Missing parts are set to the smallest value possible in that field. - -3. Range. Same as 2 but separated by a `,` to span an interval. e.g `2019,2024` - spans an interval from 1st January 2019 to the 1st January of 2024. - -All interval boundaries are inclusive. You can ignore certain advisories while checking by specifying a list of regular expressions[^1] to match their URLs by using the `ignorepattern` diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 37bc248..7406071 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -79,17 +79,39 @@ forward_queue = 5 forward_insecure = false ``` +If the `folder` option is given all the advisories are stored in a subfolder +of this name. Otherwise the advisories are each stored in a folder named +by the year they are from. 
+ +You can ignore certain advisories while downloading by specifying a list +of regular expressions[^1] to match their URLs by using the `ignorepattern` +option. + +E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain +the sub strings **white** or **red**. +In the config file this has to be noted as: +``` +ignorepattern = [".*white.*", ".*red.*"] +``` + +#### Timerange option + The `timerange` parameter enables downloading advisories which last changes falls into a given intervall. There are three possible notations: -1. Relative. If the given string follows the rules of being a [Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration) - the time interval from now minus that duration till now is used. - E.g. `"3h"` means downloading the advisories that have changed in the last three hours. +1. Relative. If the given string follows the rules of a + [Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration), + the time interval from now going back that duration is used. + Some examples: + - `"3h"` means downloading the advisories that have changed in the last three hours. + - `"30m"` .. changed within the last thirty minutes. + - `"72h"` .. changed within the last three days. + - `"8760h"` .. changed within the last 365 days. -2. Absolute. If the given string is an RFC 3339 date timestamp the time interval between - this date and now is used. +2. Absolute. If the given string is an RFC 3339 date timestamp + the time interval between this date and now is used. E.g. `"2006-01-02"` means that all files between 2006 January 2nd and now going to being - downloaded. + downloaded. Accepted patterns are: - `"2006-01-02T15:04:05Z"` - `"2006-01-02T15:04:05+07:00"` @@ -108,21 +130,6 @@ into a given intervall. There are three possible notations: All interval boundaries are inclusive. -If the `folder` option is given all the advisories are stored in a subfolder -of this name. Otherwise the advisories are each stored in a folder named -by the year they are from. - -You can ignore certain advisories while downloading by specifying a list -of regular expressions[^1] to match their URLs by using the `ignorepattern` -option. - -E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain -the sub strings **white** or **red**. -In the config file this has to be noted as: -``` -ignorepattern = [".*white.*", ".*red.*"] -``` - #### Forwarding The downloader is able to forward downloaded advisories and their checksums, OpenPGP signatures and validation results to an HTTP endpoint. From d9e579242b7b7bf567f4ee91549a07b484622980 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 18 Oct 2023 10:27:59 +0200 Subject: [PATCH 029/235] Added csaf_searcher to README --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 1dd0233..3a73283 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,9 @@ Can be used for automated forwarding of CSAF documents. ### [csaf_validator](docs/csaf_validator.md) is a tool to validate local advisories files against the JSON Schema and an optional remote validator. +### [csaf_searcher](docs/csaf_searcher.md) +is a tool to search through local advisories. It finds PURLs based on the product ID of an advisory. 
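The timerange notations documented for the downloader in patch 028 above are handled by `models.TimeRange`: `UnmarshalFlag` (also called from `UnmarshalText` for the TOML config) parses the relative, absolute and range forms, and its `Contains` method is what the downloader and aggregator install as their age-accept filter. The sketch below is hypothetical, uses invented values, and only compiles from within this repository, since `internal/models` is an internal package.

```go
// Hypothetical sketch of the TimeRange API used by the downloader/aggregator.
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/csaf-poc/csaf_distribution/v3/internal/models"
)

func main() {
	var tr models.TimeRange

	// Any documented notation is accepted: a relative duration such as
	// "72h", an absolute prefix such as "2023-10", or an inclusive range
	// like the one below (1 January 2019 up to and including 1 January 2024).
	if err := tr.UnmarshalFlag("2019,2024"); err != nil {
		log.Fatalf("invalid time range: %v", err)
	}

	// Contains reports whether a timestamp falls into the interval; the
	// tools assign this method value to their advisory age filters.
	fmt.Println(tr.Contains(time.Date(2020, 6, 15, 0, 0, 0, 0, time.UTC))) // true
	fmt.Println(tr.Contains(time.Date(2018, 6, 15, 0, 0, 0, 0, time.UTC))) // false
}
```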
+ ## Tools for advisory providers ### [csaf_provider](docs/csaf_provider.md) From 5215d78331cb59380792312b5860054e8edee372 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 18 Oct 2023 11:22:32 +0200 Subject: [PATCH 030/235] Adjust requirement --- cmd/csaf_searcher/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_searcher/main.go b/cmd/csaf_searcher/main.go index 871433d..80c308e 100644 --- a/cmd/csaf_searcher/main.go +++ b/cmd/csaf_searcher/main.go @@ -10,7 +10,7 @@ import ( "slices" "strings" - "github.com/csaf-poc/csaf_distribution/v2/csaf" + "github.com/csaf-poc/csaf_distribution/v3/csaf" ) func main() { From 455010dc6412592bebb5f894799414c2604efab8 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 19 Oct 2023 13:13:11 +0200 Subject: [PATCH 031/235] Accept days, months and years in time ranges. (#483) --- docs/csaf_downloader.md | 13 ++++--- internal/models/models.go | 69 ++++++++++++++++++++++++++++++++-- internal/models/models_test.go | 31 +++++++++++++++ 3 files changed, 105 insertions(+), 8 deletions(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 7406071..0fe4e85 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -102,11 +102,14 @@ into a given intervall. There are three possible notations: 1. Relative. If the given string follows the rules of a [Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration), the time interval from now going back that duration is used. - Some examples: - - `"3h"` means downloading the advisories that have changed in the last three hours. - - `"30m"` .. changed within the last thirty minutes. - - `"72h"` .. changed within the last three days. - - `"8760h"` .. changed within the last 365 days. + In extension to this the suffixes 'd' for days, 'M' for month + and 'y' for years are recognized. In these cases only integer + values are accepted without any fractions. + Some examples: + - `"3h"` means downloading the advisories that have changed in the last three hours. + - `"30m"` .. changed within the last thirty minutes. + - `"3M2m"` .. changed within the last three months and two minutes. + - `"2y"` .. changed within the last two years. 2. Absolute. If the given string is an RFC 3339 date timestamp the time interval between this date and now is used. diff --git a/internal/models/models.go b/internal/models/models.go index fad85c3..00fead3 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -12,7 +12,10 @@ package models import ( "encoding/json" "fmt" + "regexp" + "strconv" "strings" + "sync" "time" ) @@ -59,6 +62,65 @@ func (tr *TimeRange) UnmarshalText(text []byte) error { return tr.UnmarshalFlag(string(text)) } +var ( + yearsMonthsDays *regexp.Regexp + yearsMonthsDaysOnce sync.Once +) + +// parseDuration extends time.ParseDuration with recognition of +// years, month and days with the suffixes "y", "M" and "d". +// Onlys integer values are detected. The handling of fractional +// values would increase the complexity and may be done in the future. +// The calculate dates are assumed to be before the reference time. +func parseDuration(s string, reference time.Time) (time.Duration, error) { + var ( + extra time.Duration + err error + used bool + ) + parse := func(s string) int { + if err == nil { + var v int + v, err = strconv.Atoi(s) + return v + } + return 0 + } + // Only compile expression if really needed. 
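+	// The expression matches an optionally signed integer directly followed by
+	// a 'y', 'M' or 'd' suffix; the sync.Once keeps this lazy compilation race-free.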
+ yearsMonthsDaysOnce.Do(func() { + yearsMonthsDays = regexp.MustCompile(`[-+]?[0-9]+[yMd]`) + }) + s = yearsMonthsDays.ReplaceAllStringFunc(s, func(part string) string { + used = true + var years, months, days int + switch suf, num := part[len(part)-1], part[:len(part)-1]; suf { + case 'y': + years = -parse(num) + case 'M': + months = -parse(num) + case 'd': + days = -parse(num) + } + date := reference.AddDate(years, months, days) + extra += reference.Sub(date) + // Remove from string + return "" + }) + if err != nil { + return 0, err + } + // If there is no rest we don't need the stdlib parser. + if used && s == "" { + return extra, nil + } + // Parse the rest with the stdlib. + d, err := time.ParseDuration(s) + if err != nil { + return d, err + } + return d + extra, nil +} + // MarshalJSON implements [encoding/json.Marshaler]. func (tr TimeRange) MarshalJSON() ([]byte, error) { s := []string{ @@ -72,9 +134,10 @@ func (tr TimeRange) MarshalJSON() ([]byte, error) { func (tr *TimeRange) UnmarshalFlag(s string) error { s = strings.TrimSpace(s) + now := time.Now() + // Handle relative case first. - if duration, err := time.ParseDuration(s); err == nil { - now := time.Now() + if duration, err := parseDuration(s, now); err == nil { *tr = NewTimeInterval(now.Add(-duration), now) return nil } @@ -88,7 +151,7 @@ func (tr *TimeRange) UnmarshalFlag(s string) error { if !ok { return fmt.Errorf("%q is not a valid RFC date time", a) } - *tr = NewTimeInterval(start, time.Now()) + *tr = NewTimeInterval(start, now) return nil } // Real interval diff --git a/internal/models/models_test.go b/internal/models/models_test.go index ffbcb9c..0217bf7 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -9,6 +9,7 @@ package models import ( + "strings" "testing" "time" ) @@ -25,6 +26,36 @@ func TestNewTimeInterval(t *testing.T) { } } +func TestParseDuration(t *testing.T) { + + now := time.Now() + + for _, x := range []struct { + in string + expected time.Duration + reference time.Time + fail bool + }{ + {"1h", time.Hour, now, false}, + {"2y", now.Sub(now.AddDate(-2, 0, 0)), now, false}, + {"13M", now.Sub(now.AddDate(0, -13, 0)), now, false}, + {"31d", now.Sub(now.AddDate(0, 0, -31)), now, false}, + {"1h2d3m", now.Sub(now.AddDate(0, 0, -2)) + time.Hour + 3*time.Minute, now, false}, + {strings.Repeat("1", 70) + "y1d", 0, now, true}, + } { + got, err := parseDuration(x.in, x.reference) + if err != nil { + if !x.fail { + t.Errorf("%q should not fail: %v", x.in, err) + } + continue + } + if got != x.expected { + t.Errorf("%q got %v expected %v", x.in, got, x.expected) + } + } +} + // TestGuessDate tests whether a sample of strings are correctly parsed into Dates by guessDate() func TestGuessDate(t *testing.T) { if _, guess := guessDate("2006-01-02T15:04:05"); !guess { From 21fa98186ca4af2d0572e19ade44206150da58b4 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 2 Nov 2023 10:41:24 +0100 Subject: [PATCH 032/235] Use Intevation's JSONPath fork (#490) * Use Intevation fork of github.com/PaesslerAG/jsonpath * Remove passus about double quouted jsonpath strings. 
--- docs/csaf_provider.md | 2 -- go.mod | 4 ++-- go.sum | 11 +++++------ util/json.go | 4 ++-- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index 8b0117a..b02165b 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -116,8 +116,6 @@ The following example file documents all available configuration options: # - vendor, product family and product names: "expr:$.product_tree..branches[?(@.category==\"vendor\" || @.category==\"product_family\" || @.category==\"product_name\")].name" # - CVEs: "expr:$.vulnerabilities[*].cve" # - CWEs: "expr:$.vulnerabilities[*].cwe.id" -# The used implementation to evaluate JSONPath expressions does -# not support the use of single-quotes. Double quotes have to be quoted. # Strings not starting with `expr:` are taken verbatim. # By default no category documents are created. # This example provides an overview over the syntax, diff --git a/go.mod b/go.mod index 1304cc9..c89f97b 100644 --- a/go.mod +++ b/go.mod @@ -4,8 +4,8 @@ go 1.21 require ( github.com/BurntSushi/toml v1.3.2 - github.com/PaesslerAG/gval v1.2.2 - github.com/PaesslerAG/jsonpath v0.1.1 + github.com/Intevation/gval v1.3.0 + github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.7.3 github.com/PuerkitoBio/goquery v1.8.1 github.com/gofrs/flock v0.8.1 diff --git a/go.sum b/go.sum index f5220ad..cdb5a15 100644 --- a/go.sum +++ b/go.sum @@ -1,11 +1,9 @@ github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/PaesslerAG/gval v1.0.0/go.mod h1:y/nm5yEyTeX6av0OfKJNp9rBNj2XrGhAf5+v24IBN1I= -github.com/PaesslerAG/gval v1.2.2 h1:Y7iBzhgE09IGTt5QgGQ2IdaYYYOU134YGHBThD+wm9E= -github.com/PaesslerAG/gval v1.2.2/go.mod h1:XRFLwvmkTEdYziLdaCeCa5ImcGVrfQbeNUbVR+C6xac= -github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8= -github.com/PaesslerAG/jsonpath v0.1.1 h1:c1/AToHQMVsduPAa4Vh6xp2U0evy4t8SWp8imEsylIk= -github.com/PaesslerAG/jsonpath v0.1.1/go.mod h1:lVboNxFGal/VwW6d9JzIy56bUsYAP6tH/x80vjnCseY= +github.com/Intevation/gval v1.3.0 h1:+Ze5sft5MmGbZrHj06NVUbcxCb67l9RaPTLMNr37mjw= +github.com/Intevation/gval v1.3.0/go.mod h1:xmGyGpP5be12EL0P12h+dqiYG8qn2j3PJxIgkoOHO5o= +github.com/Intevation/jsonpath v0.2.1 h1:rINNQJ0Pts5XTFEG+zamtdL7l9uuE1z0FBA+r55Sw+A= +github.com/Intevation/jsonpath v0.2.1/go.mod h1:WnZ8weMmwAx/fAO3SutjYFU+v7DFreNYnibV7CiaYIw= github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= @@ -26,6 +24,7 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= github.com/mitchellh/go-homedir v1.1.0 
h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= diff --git a/util/json.go b/util/json.go index d475ecb..851974b 100644 --- a/util/json.go +++ b/util/json.go @@ -15,8 +15,8 @@ import ( "fmt" "time" - "github.com/PaesslerAG/gval" - "github.com/PaesslerAG/jsonpath" + "github.com/Intevation/gval" + "github.com/Intevation/jsonpath" ) // ReMarshalJSON transforms data from src to dst via JSON marshalling. From 03a907b9b812a17faef97f21a804a21dc31154da Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 2 Nov 2023 12:19:16 +0100 Subject: [PATCH 033/235] Fix checker doc of TOML config of validator (#492) --- docs/csaf_checker.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 1db8292..4253785 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -44,7 +44,7 @@ Supported options in config files: ``` output = "" format = "json" -insecure = false +insecure = false # client_cert # not set by default # client_key # not set by default # client_passphrase # not set by default @@ -53,8 +53,8 @@ verbose = false # timerange # not set by default # header # not set by default # validator # not set by default -# validatorcache # not set by default -validatorpreset = ["mandatory"] +# validator_cache # not set by default +validator_preset = ["mandatory"] ``` Usage example: From 7fbc012e2cf9beb263b8c59ac28128241389b746 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 2 Nov 2023 13:06:37 +0100 Subject: [PATCH 034/235] Docs: Add Development.md (#493) * Add docs/Development.md * Fix link --- README.md | 11 ++++------- docs/Development.md | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+), 7 deletions(-) create mode 100644 docs/Development.md diff --git a/README.md b/README.md index 3a73283..4fde787 100644 --- a/README.md +++ b/README.md @@ -59,13 +59,6 @@ Download the binaries from the most recent release assets on Github. Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-windows-amd64/`. -- Maintainers only: No need to do this if you have cloned this repository for unmodified usage only. -``` -go generate ./... -``` -will update the machine generated code. - - ### Setup (Trusted Provider) - [Install](https://nginx.org/en/docs/install.html) **nginx** @@ -73,6 +66,10 @@ will update the machine generated code. - To configure nginx see [docs/provider-setup.md](docs/provider-setup.md) - To configure nginx for client certificate authentication see [docs/client-certificate-setup.md](docs/client-certificate-setup.md) +### Development + +For further details of the development process consult our [development page](./docs/development.md). + ## License diff --git a/docs/Development.md b/docs/Development.md new file mode 100644 index 0000000..0a7004a --- /dev/null +++ b/docs/Development.md @@ -0,0 +1,18 @@ +# Development + +## Generated files + +Some source code files are machine generated. At the moment these are only +[cvss20enums.go](../csaf/cvss20enums.go) and [cvss3enums.go](../csaf/cvss3enums.go) on the +basis of the [Advisory JSON schema](../csaf/schema/csaf_json_schema.json). + +If you change the source files please regenerate the generated files +with `go generate ./...` in the root folder and add the updated files +to the version control. + +If you plan to add further machine generated files ensure that they +are marked with comments like +``` +// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! +``` +. 
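+
+For illustration, a generated file is usually driven by a `//go:generate`
+directive in a checked-in source file, which is what `go generate ./...`
+picks up. The following sketch only shows the mechanism; the generator
+command, its flags and the package layout are hypothetical and not the ones
+used in this repository:
+
+```go
+// Hypothetical sketch of a generation hook; the real generator differs.
+package csaf
+
+// `go generate ./...` finds this directive and runs the named command,
+// which would rewrite cvss20enums.go from the JSON schema.
+//go:generate go run ./generator -schema schema/csaf_json_schema.json -out cvss20enums.go
+```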
From 26c630df4a9e46dd24ed16fe20a4b789a8516c4a Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 2 Nov 2023 14:12:41 +0100 Subject: [PATCH 035/235] API examples: move csaf_searcher to a lower prio place (#489) * move csaf_searcher to a lower prio place * Adjust wording * Grammar fix #2 'this is work in progress' -> 'This is a work in progress'... --------- Co-authored-by: JanHoefelmeyer --- README.md | 9 ++++++--- examples/README.md | 5 +++++ {cmd/csaf_searcher => examples/purls_searcher}/main.go | 0 3 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 examples/README.md rename {cmd/csaf_searcher => examples/purls_searcher}/main.go (100%) diff --git a/README.md b/README.md index 4fde787..293c9a0 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,6 @@ Can be used for automated forwarding of CSAF documents. ### [csaf_validator](docs/csaf_validator.md) is a tool to validate local advisories files against the JSON Schema and an optional remote validator. -### [csaf_searcher](docs/csaf_searcher.md) -is a tool to search through local advisories. It finds PURLs based on the product ID of an advisory. - ## Tools for advisory providers ### [csaf_provider](docs/csaf_provider.md) @@ -28,6 +25,12 @@ is a tool for testing a CSAF Trusted Provider according to [Section 7 of the CSA ### [csaf_aggregator](docs/csaf_aggregator.md) is a CSAF Aggregator, to list or mirror providers. +## Other stuff + +### [examples](./examples/README.md) +are small examples of how to use `github.com/csaf-poc/csaf_distribution` as an API. +Currently this is a work in progress. They may be extended and/or changed in the future. + ## Setup Binaries for the server side are only available and tested for GNU/Linux-Systems, e.g. Ubuntu LTS. diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000..65c858f --- /dev/null +++ b/examples/README.md @@ -0,0 +1,5 @@ +# API examples + +A currenty very short list how to use `github.com/csaf-poc/csaf_distribution` as a library: + +* (purl_searcher)[./purl_searcher/main.go] is a tool to search through local advisories. It finds PURLs based on the product ID of an advisory. diff --git a/cmd/csaf_searcher/main.go b/examples/purls_searcher/main.go similarity index 100% rename from cmd/csaf_searcher/main.go rename to examples/purls_searcher/main.go From effd4a01af4b5e14fc25200540eb664617a857b3 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 2 Nov 2023 14:24:59 +0100 Subject: [PATCH 036/235] Fix link to development doc page. (#495) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 293c9a0..1e599a5 100644 --- a/README.md +++ b/README.md @@ -71,7 +71,7 @@ Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-wi ### Development -For further details of the development process consult our [development page](./docs/development.md). +For further details of the development process consult our [development page](./docs/Development.md). ## License From 0fe118f7c100d1270566bcf34b97f7be5c09b759 Mon Sep 17 00:00:00 2001 From: "Sascha L. 
Teichmann" Date: Thu, 2 Nov 2023 17:13:22 +0100 Subject: [PATCH 037/235] Update dependencies --- go.mod | 14 +++++++------- go.sum | 14 ++++++++++++++ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/go.mod b/go.mod index c89f97b..82a20c5 100644 --- a/go.mod +++ b/go.mod @@ -6,15 +6,15 @@ require ( github.com/BurntSushi/toml v1.3.2 github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 - github.com/ProtonMail/gopenpgp/v2 v2.7.3 + github.com/ProtonMail/gopenpgp/v2 v2.7.4 github.com/PuerkitoBio/goquery v1.8.1 github.com/gofrs/flock v0.8.1 github.com/jessevdk/go-flags v1.5.0 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 - go.etcd.io/bbolt v1.3.7 - golang.org/x/crypto v0.13.0 - golang.org/x/term v0.12.0 + go.etcd.io/bbolt v1.3.8 + golang.org/x/crypto v0.14.0 + golang.org/x/term v0.13.0 golang.org/x/time v0.3.0 ) @@ -22,10 +22,10 @@ require ( github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect github.com/andybalholm/cascadia v1.3.2 // indirect - github.com/cloudflare/circl v1.3.3 // indirect + github.com/cloudflare/circl v1.3.6 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.3.1 // indirect - golang.org/x/net v0.15.0 // indirect - golang.org/x/sys v0.12.0 // indirect + golang.org/x/net v0.17.0 // indirect + golang.org/x/sys v0.13.0 // indirect golang.org/x/text v0.13.0 // indirect ) diff --git a/go.sum b/go.sum index cdb5a15..c2643b9 100644 --- a/go.sum +++ b/go.sum @@ -11,6 +11,8 @@ github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ek github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= github.com/ProtonMail/gopenpgp/v2 v2.7.3 h1:AJu1OI/1UWVYZl6QcCLKGu9OTngS2r52618uGlje84I= github.com/ProtonMail/gopenpgp/v2 v2.7.3/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= +github.com/ProtonMail/gopenpgp/v2 v2.7.4 h1:Vz/8+HViFFnf2A6XX8JOvZMrA6F5puwNvvF21O1mRlo= +github.com/ProtonMail/gopenpgp/v2 v2.7.4/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= @@ -19,6 +21,8 @@ github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6 github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs= github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cloudflare/circl v1.3.6 h1:/xbKIqSHbZXHwkhbrhrt2YOHIwYJlXH94E3tI/gDlUg= +github.com/cloudflare/circl v1.3.6/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -44,12 +48,16 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ= go.etcd.io/bbolt 
v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= +go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -63,6 +71,8 @@ golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -80,6 +90,8 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= @@ -88,6 +100,8 @@ golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= From 
7f9449a12fbc22f1a4087a03d5ac03e329ca496a Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 2 Nov 2023 18:23:43 +0100 Subject: [PATCH 038/235] Fix pattern matching of purls and document categories. Extract purls from relationships. --- csaf/advisory.go | 4 ++-- examples/purls_searcher/main.go | 11 +++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index 95a6821..6a8ded8 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -106,7 +106,7 @@ var cpePattern = patternUnmarshal("^(cpe:2\\.3:[aho\\*\\-](:(((\\?*|\\*?)([a-zA- // PURL represents a package URL in an advisory. type PURL string -var pURLPattern = patternUnmarshal(`^pkg:[A-Za-z\\.\\-\\+][A-Za-z0-9\\.\\-\\+]*/.+`) +var pURLPattern = patternUnmarshal("^pkg:[A-Za-z\\.\\-\\+][A-Za-z0-9\\.\\-\\+]*/.+") // XGenericURI represents an identifier for a product. type XGenericURI struct { @@ -223,7 +223,7 @@ type AggregateSeverity struct { // DocumentCategory represents a category of a document. type DocumentCategory string -var documentCategoryPattern = patternUnmarshal(`^[^\\s\\-_\\.](.*[^\\s\\-_\\.])?$`) +var documentCategoryPattern = patternUnmarshal("^[^\\s\\-_\\.](.*[^\\s\\-_\\.])?$") // Version is the version of a document. type Version string diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index 80c308e..b01e671 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -123,4 +123,15 @@ func (uf *urlFinder) findURLs(adv *csaf.Advisory) { for _, b := range tree.Branches { recBranch(b) } + + // Third iterate over relationships. + if tree.RelationShips != nil { + for _, rel := range *tree.RelationShips { + if rel != nil { + if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil { + add(slices.Index(uf.ids, *fpn.ProductID), fpn.ProductIdentificationHelper) + } + } + } + } } From 086c4ab48b292b05b2382bf7b26d99ed8b346a0d Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Mon, 6 Nov 2023 14:33:05 +0100 Subject: [PATCH 039/235] Convert a lot of command line arguments to snake case (#498) * Convert a lot of variables to snake case * Add snakecase for variables made out of two words that had it in no version yet (for consistency) * Adjust example files too --------- Co-authored-by: JanHoefelmeyer --- cmd/csaf_aggregator/config.go | 8 +++---- cmd/csaf_checker/config.go | 14 ++++++------- cmd/csaf_downloader/config.go | 32 ++++++++++++++-------------- cmd/csaf_uploader/config.go | 14 ++++++------- cmd/csaf_validator/main.go | 4 ++-- docs/csaf_aggregator.md | 10 ++++----- docs/csaf_checker.md | 16 +++++++------- docs/csaf_downloader.md | 36 ++++++++++++++++---------------- docs/csaf_uploader.md | 14 ++++++------- docs/csaf_validator.md | 4 ++-- docs/examples/aggregator.toml | 6 +++--- docs/scripts/testChecker.sh | 4 ++-- docs/scripts/uploadToProvider.sh | 4 ++-- 13 files changed, 83 insertions(+), 83 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index ecc88dc..edb1fd9 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -54,7 +54,7 @@ type provider struct { UpdateInterval *string `toml:"update_interval"` // IgnorePattern is a list of patterns of advisory URLs to be ignored. 
- IgnorePattern []string `toml:"ignorepattern"` + IgnorePattern []string `toml:"ignore_pattern"` // ExtraHeader adds extra HTTP header fields to client ExtraHeader http.Header `toml:"header"` @@ -63,7 +63,7 @@ type provider struct { ClientKey *string `toml:"client_key"` ClientPassphrase *string `toml:"client_passphrase"` - Range *models.TimeRange `toml:"timerange"` + Range *models.TimeRange `toml:"time_range"` clientCerts []tls.Certificate ignorePattern filter.PatternMatcher @@ -92,7 +92,7 @@ type config struct { ClientKey *string `toml:"client_key"` ClientPassphrase *string `toml:"client_passphrase"` - Range *models.TimeRange `long:"timerange" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"timerange"` + Range *models.TimeRange `long:"time_range" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"time_range"` // LockFile tries to lock to a given file. LockFile *string `toml:"lock_file"` @@ -116,7 +116,7 @@ type config struct { UpdateInterval *string `toml:"update_interval"` // IgnorePattern is a list of patterns of advisory URLs to be ignored. - IgnorePattern []string `toml:"ignorepattern"` + IgnorePattern []string `toml:"ignore_pattern"` // ExtraHeader adds extra HTTP header fields to client ExtraHeader http.Header `toml:"header"` diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index 4e86a0c..3502443 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -31,18 +31,18 @@ type config struct { //lint:ignore SA5008 We are using choice twice: json, html. Format outputFormat `short:"f" long:"format" choice:"json" choice:"html" description:"Format of report" toml:"format"` Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"` - ClientCert *string `long:"client-cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"` - ClientKey *string `long:"client-key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE" toml:"client_key"` - ClientPassphrase *string `long:"client-passphrase" description:"Optional passphrase for the client cert (limited, experimental, see downloader doc)" value-name:"PASSPHRASE" toml:"client_passphrase"` + ClientCert *string `long:"client_cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"` + ClientKey *string `long:"client_key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE" toml:"client_key"` + ClientPassphrase *string `long:"client_passphrase" description:"Optional passphrase for the client cert (limited, experimental, see downloader doc)" value-name:"PASSPHRASE" toml:"client_passphrase"` Version bool `long:"version" description:"Display version of the binary" toml:"-"` Verbose bool `long:"verbose" short:"v" description:"Verbose output" toml:"verbose"` Rate *float64 `long:"rate" short:"r" description:"The average upper limit of https operations per second (defaults to unlimited)" toml:"rate"` - Range *models.TimeRange `long:"timerange" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"timerange"` - IgnorePattern []string `long:"ignorepattern" short:"i" description:"Do not download files if their URLs match any of the given PATTERNs" value-name:"PATTERN" toml:"ignorepattern"` + Range *models.TimeRange `long:"time_range" short:"t" description:"RANGE of time from which 
advisories to download" value-name:"RANGE" toml:"time_range"` + IgnorePattern []string `long:"ignore_pattern" short:"i" description:"Do not download files if their URLs match any of the given PATTERNs" value-name:"PATTERN" toml:"ignore_pattern"` ExtraHeader http.Header `long:"header" short:"H" description:"One or more extra HTTP header fields" toml:"header"` RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL" toml:"validator"` - RemoteValidatorCache string `long:"validatorcache" description:"FILE to cache remote validations" value-name:"FILE" toml:"validator_cache"` - RemoteValidatorPresets []string `long:"validatorpreset" description:"One or more presets to validate remotely" toml:"validator_preset"` + RemoteValidatorCache string `long:"validator_cache" description:"FILE to cache remote validations" value-name:"FILE" toml:"validator_cache"` + RemoteValidatorPresets []string `long:"validator_preset" description:"One or more presets to validate remotely" toml:"validator_preset"` Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"` diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 0c1ade1..1761d75 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -44,34 +44,34 @@ const ( type config struct { Directory string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"` Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"` - IgnoreSignatureCheck bool `long:"ignoresigcheck" description:"Ignore signature check results, just warn on mismatch" toml:"ignoresigcheck"` - ClientCert *string `long:"client-cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"` - ClientKey *string `long:"client-key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE" toml:"client_key"` - ClientPassphrase *string `long:"client-passphrase" description:"Optional passphrase for the client cert (limited, experimental, see doc)" value-name:"PASSPHRASE" toml:"client_passphrase"` + IgnoreSignatureCheck bool `long:"ignore_sigcheck" description:"Ignore signature check results, just warn on mismatch" toml:"ignore_sigcheck"` + ClientCert *string `long:"client_cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"` + ClientKey *string `long:"client_key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE" toml:"client_key"` + ClientPassphrase *string `long:"client_passphrase" description:"Optional passphrase for the client cert (limited, experimental, see doc)" value-name:"PASSPHRASE" toml:"client_passphrase"` Version bool `long:"version" description:"Display version of the binary" toml:"-"` - NoStore bool `long:"nostore" short:"n" description:"Do not store files" toml:"no_store"` + NoStore bool `long:"no_store" short:"n" description:"Do not store files" toml:"no_store"` Rate *float64 `long:"rate" short:"r" description:"The average upper limit of https operations per second (defaults to unlimited)" toml:"rate"` Worker int `long:"worker" short:"w" description:"NUMber of concurrent downloads" value-name:"NUM" toml:"worker"` - Range *models.TimeRange `long:"timerange" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"timerange"` + Range 
*models.TimeRange `long:"time_range" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"time_range"` Folder string `long:"folder" short:"f" description:"Download into a given subFOLDER" value-name:"FOLDER" toml:"folder"` - IgnorePattern []string `long:"ignorepattern" short:"i" description:"Do not download files if their URLs match any of the given PATTERNs" value-name:"PATTERN" toml:"ignorepattern"` + IgnorePattern []string `long:"ignore_pattern" short:"i" description:"Do not download files if their URLs match any of the given PATTERNs" value-name:"PATTERN" toml:"ignore_pattern"` ExtraHeader http.Header `long:"header" short:"H" description:"One or more extra HTTP header fields" toml:"header"` RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL" toml:"validator"` - RemoteValidatorCache string `long:"validatorcache" description:"FILE to cache remote validations" value-name:"FILE" toml:"validatorcache"` - RemoteValidatorPresets []string `long:"validatorpreset" description:"One or more PRESETS to validate remotely" value-name:"PRESETS" toml:"validatorpreset"` + RemoteValidatorCache string `long:"validator_cache" description:"FILE to cache remote validations" value-name:"FILE" toml:"validator_cache"` + RemoteValidatorPresets []string `long:"validator_preset" description:"One or more PRESETS to validate remotely" value-name:"PRESETS" toml:"validator_preset"` //lint:ignore SA5008 We are using choice twice: strict, unsafe. - ValidationMode validationMode `long:"validationmode" short:"m" choice:"strict" choice:"unsafe" value-name:"MODE" description:"MODE how strict the validation is" toml:"validation_mode"` + ValidationMode validationMode `long:"validation_mode" short:"m" choice:"strict" choice:"unsafe" value-name:"MODE" description:"MODE how strict the validation is" toml:"validation_mode"` - ForwardURL string `long:"forwardurl" description:"URL of HTTP endpoint to forward downloads to" value-name:"URL" toml:"forward_url"` - ForwardHeader http.Header `long:"forwardheader" description:"One or more extra HTTP header fields used by forwarding" toml:"forward_header"` - ForwardQueue int `long:"forwardqueue" description:"Maximal queue LENGTH before forwarder" value-name:"LENGTH" toml:"forward_queue"` - ForwardInsecure bool `long:"forwardinsecure" description:"Do not check TLS certificates from forward endpoint" toml:"forward_insecure"` + ForwardURL string `long:"forward_url" description:"URL of HTTP endpoint to forward downloads to" value-name:"URL" toml:"forward_url"` + ForwardHeader http.Header `long:"forward_header" description:"One or more extra HTTP header fields used by forwarding" toml:"forward_header"` + ForwardQueue int `long:"forward_queue" description:"Maximal queue LENGTH before forwarder" value-name:"LENGTH" toml:"forward_queue"` + ForwardInsecure bool `long:"forward_insecure" description:"Do not check TLS certificates from forward endpoint" toml:"forward_insecure"` - LogFile *string `long:"logfile" description:"FILE to log downloading to" value-name:"FILE" toml:"log_file"` + LogFile *string `long:"log_file" description:"FILE to log downloading to" value-name:"FILE" toml:"log_file"` //lint:ignore SA5008 We are using choice or than once: debug, info, warn, error - LogLevel *options.LogLevel `long:"loglevel" description:"LEVEL of logging details" value-name:"LEVEL" choice:"debug" choice:"info" choice:"warn" choice:"error" toml:"log_level"` + LogLevel *options.LogLevel `long:"log_level" 
description:"LEVEL of logging details" value-name:"LEVEL" choice:"debug" choice:"info" choice:"warn" choice:"error" toml:"log_level"` Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"` diff --git a/cmd/csaf_uploader/config.go b/cmd/csaf_uploader/config.go index 5892eea..5543813 100644 --- a/cmd/csaf_uploader/config.go +++ b/cmd/csaf_uploader/config.go @@ -35,18 +35,18 @@ type config struct { URL string `short:"u" long:"url" description:"URL of the CSAF provider" value-name:"URL" toml:"url"` //lint:ignore SA5008 We are using choice many times: csaf, white, green, amber, red. TLP string `short:"t" long:"tlp" choice:"csaf" choice:"white" choice:"green" choice:"amber" choice:"red" description:"TLP of the feed" toml:"tlp"` - ExternalSigned bool `short:"x" long:"external-signed" description:"CSAF files are signed externally. Assumes .asc files beside CSAF files." toml:"external_signed"` - NoSchemaCheck bool `short:"s" long:"no-schema-check" description:"Do not check files against CSAF JSON schema locally." toml:"no_schema_check"` + ExternalSigned bool `short:"x" long:"external_signed" description:"CSAF files are signed externally. Assumes .asc files beside CSAF files." toml:"external_signed"` + NoSchemaCheck bool `short:"s" long:"no_schema_check" description:"Do not check files against CSAF JSON schema locally." toml:"no_schema_check"` Key *string `short:"k" long:"key" description:"OpenPGP key to sign the CSAF files" value-name:"KEY-FILE" toml:"key"` Password *string `short:"p" long:"password" description:"Authentication password for accessing the CSAF provider" value-name:"PASSWORD" toml:"password"` Passphrase *string `short:"P" long:"passphrase" description:"Passphrase to unlock the OpenPGP key" value-name:"PASSPHRASE" toml:"passphrase"` - ClientCert *string `long:"client-cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE.crt" toml:"client_cert"` - ClientKey *string `long:"client-key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE.pem" toml:"client_key"` - ClientPassphrase *string `long:"client-passphrase" description:"Optional passphrase for the client cert (limited, experimental, see downloader doc)" value-name:"PASSPHRASE" toml:"client_passphrase"` + ClientCert *string `long:"client_cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE.crt" toml:"client_cert"` + ClientKey *string `long:"client_key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE.pem" toml:"client_key"` + ClientPassphrase *string `long:"client_passphrase" description:"Optional passphrase for the client cert (limited, experimental, see downloader doc)" value-name:"PASSPHRASE" toml:"client_passphrase"` - PasswordInteractive bool `short:"i" long:"password-interactive" description:"Enter password interactively" toml:"password_interactive"` - PassphraseInteractive bool `short:"I" long:"passphrase-interactive" description:"Enter OpenPGP key passphrase interactively" toml:"passphrase_interactive"` + PasswordInteractive bool `short:"i" long:"password_interactive" description:"Enter password interactively" toml:"password_interactive"` + PassphraseInteractive bool `short:"I" long:"passphrase_interactive" description:"Enter OpenPGP key passphrase interactively" toml:"passphrase_interactive"` Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"` diff --git a/cmd/csaf_validator/main.go 
b/cmd/csaf_validator/main.go index 559562e..7e03268 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -25,8 +25,8 @@ import ( type options struct { Version bool `long:"version" description:"Display version of the binary"` RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL"` - RemoteValidatorCache string `long:"validatorcache" description:"FILE to cache remote validations" value-name:"FILE"` - RemoteValidatorPresets []string `long:"validatorpreset" description:"One or more presets to validate remotely" default:"mandatory"` + RemoteValidatorCache string `long:"validator_cache" description:"FILE to cache remote validations" value-name:"FILE"` + RemoteValidatorPresets []string `long:"validator_preset" description:"One or more presets to validate remotely" default:"mandatory"` Output string `short:"o" long:"output" description:"If a remote validator was used, display AMOUNT ('all', 'important' or 'short') results" value-name:"AMOUNT"` } diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 9769b27..042d321 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -6,7 +6,7 @@ csaf_aggregator [OPTIONS] Application Options: - -t, --timerange=RANGE RANGE of time from which advisories to download + -t, --time_range=RANGE RANGE of time from which advisories to download -i, --interim Perform an interim scan --version Display version of the binary -c, --config=TOML-FILE Path to config TOML file @@ -104,12 +104,12 @@ lock_file // path to lockfile, to stop other instances if one is n interim_years // limiting the years for which interim documents are searched (default 0) verbose // print more diagnostic output, e.g. https requests (default false) allow_single_provider // debugging option (default false) -ignorepattern // patterns of advisory URLs to be ignored (see checker doc for details) +ignore_pattern // patterns of advisory URLs to be ignored (see checker doc for details) client_cert // path to client certificate to access access-protected advisories client_key // path to client key to access access-protected advisories client_passphrase // optional client cert passphrase (limited, experimental, see downloader doc) header // adds extra HTTP header fields to the client -timerange // Accepted time range of advisories to handle. See downloader docs for details. +time_range // Accepted time range of advisories to handle. See downloader docs for details. 
``` Next we have two TOML _tables_: @@ -139,7 +139,7 @@ category update_interval create_service_document categories -ignorepattern +ignore_pattern client_cert client_key client_passphrase @@ -229,7 +229,7 @@ insecure = true # If aggregator.category == "aggregator", set for an entry that should # be listed in addition: category = "lister" -# ignorepattern = [".*white.*", ".*red.*"] +# ignore_pattern = [".*white.*", ".*red.*"] ``` diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 4253785..58f77ca 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -10,18 +10,18 @@ Application Options: -o, --output=REPORT-FILE File name of the generated report -f, --format=[json|html] Format of report (default: json) --insecure Do not check TLS certificates from provider - --client-cert=CERT-FILE TLS client certificate file (PEM encoded data) - --client-key=KEY-FILE TLS client private key file (PEM encoded data) - --client-passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see downloader doc) + --client_cert=CERT-FILE TLS client certificate file (PEM encoded data) + --client_key=KEY-FILE TLS client private key file (PEM encoded data) + --client_passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see downloader doc) --version Display version of the binary -v, --verbose Verbose output -r, --rate= The average upper limit of https operations per second (defaults to unlimited) - -t, --timerange=RANGE RANGE of time from which advisories to download - -i, --ignorepattern=PATTERN Do not download files if their URLs match any of the given PATTERNs + -t, --time_range=RANGE RANGE of time from which advisories to download + -i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs -H, --header= One or more extra HTTP header fields --validator=URL URL to validate documents remotely - --validatorcache=FILE FILE to cache remote validations - --validatorpreset= One or more presets to validate remotely (default: [mandatory]) + --validator_cache=FILE FILE to cache remote validations + --validator_preset= One or more presets to validate remotely (default: [mandatory]) -c, --config=TOML-FILE Path to config TOML file Help Options: @@ -50,7 +50,7 @@ insecure = false # client_passphrase # not set by default verbose = false # rate # not set by default -# timerange # not set by default +# time_range # not set by default # header # not set by default # validator # not set by default # validator_cache # not set by default diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 0fe4e85..fcf6634 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -9,26 +9,26 @@ csaf_downloader [OPTIONS] domain... 
Application Options: -d, --directory=DIR DIRectory to store the downloaded files in --insecure Do not check TLS certificates from provider - --ignoresigcheck Ignore signature check results, just warn on mismatch - --client-cert=CERT-FILE TLS client certificate file (PEM encoded data) - --client-key=KEY-FILE TLS client private key file (PEM encoded data) - --client-passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see doc) + --ignore_sigcheck Ignore signature check results, just warn on mismatch + --client_cert=CERT-FILE TLS client certificate file (PEM encoded data) + --client_key=KEY-FILE TLS client private key file (PEM encoded data) + --client_passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see doc) --version Display version of the binary - -n, --nostore Do not store files + -n, --no_store Do not store files -r, --rate= The average upper limit of https operations per second (defaults to unlimited) -w, --worker=NUM NUMber of concurrent downloads (default: 2) - -t, --timerange=RANGE RANGE of time from which advisories to download + -t, --time_range=RANGE RANGE of time from which advisories to download -f, --folder=FOLDER Download into a given subFOLDER - -i, --ignorepattern=PATTERN Do not download files if their URLs match any of the given PATTERNs + -i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs -H, --header= One or more extra HTTP header fields --validator=URL URL to validate documents remotely - --validatorcache=FILE FILE to cache remote validations - --validatorpreset=PRESETS One or more PRESETS to validate remotely (default: [mandatory]) - -m, --validationmode=MODE[strict|unsafe] MODE how strict the validation is (default: strict) - --forwardurl=URL URL of HTTP endpoint to forward downloads to - --forwardheader= One or more extra HTTP header fields used by forwarding - --forwardqueue=LENGTH Maximal queue LENGTH before forwarder (default: 5) - --forwardinsecure Do not check TLS certificates from forward endpoint + --validator_cache=FILE FILE to cache remote validations + --validator_preset=PRESETS One or more PRESETS to validate remotely (default: [mandatory]) + -m, --validation_mode=MODE[strict|unsafe] MODE how strict the validation is (default: strict) + --forward_url=URL URL of HTTP endpoint to forward downloads to + --forward_header= One or more extra HTTP header fields used by forwarding + --forward_queue=LENGTH Maximal queue LENGTH before forwarder (default: 5) + --forward_insecure Do not check TLS certificates from forward endpoint --logfile=FILE FILE to log downloading to (default: downloader.log) --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file @@ -65,13 +65,13 @@ insecure = false ignoresigcheck = false # rate # set to unlimited worker = 2 -# timerange # not set by default +# time_range # not set by default # folder # not set by default -# ignorepattern # not set by default +# ignore_pattern # not set by default # header # not set by default # validator # not set by default -# validatorcache # not set by default -validatorpreset = ["mandatory"] +# validator_cache # not set by default +validator_preset = ["mandatory"] validation_mode = "strict" # forward_url # not set by default # forward_header # not set by default diff --git a/docs/csaf_uploader.md b/docs/csaf_uploader.md index 7ff0db7..0e68aa9 100644 --- a/docs/csaf_uploader.md +++ b/docs/csaf_uploader.md @@ -9,16 +9,16 @@ 
Application Options: -a, --action=[upload|create] Action to perform (default: upload) -u, --url=URL URL of the CSAF provider (default: https://localhost/cgi-bin/csaf_provider.go) -t, --tlp=[csaf|white|green|amber|red] TLP of the feed (default: csaf) - -x, --external-signed CSAF files are signed externally. Assumes .asc files beside CSAF files. - -s, --no-schema-check Do not check files against CSAF JSON schema locally. + -x, --external_signed CSAF files are signed externally. Assumes .asc files beside CSAF files. + -s, --no_schema_check Do not check files against CSAF JSON schema locally. -k, --key=KEY-FILE OpenPGP key to sign the CSAF files -p, --password=PASSWORD Authentication password for accessing the CSAF provider -P, --passphrase=PASSPHRASE Passphrase to unlock the OpenPGP key - --client-cert=CERT-FILE.crt TLS client certificate file (PEM encoded data) - --client-key=KEY-FILE.pem TLS client private key file (PEM encoded data) - --client-passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see downloader doc) - -i, --password-interactive Enter password interactively - -I, --passphrase-interactive Enter OpenPGP key passphrase interactively + --client_cert=CERT-FILE.crt TLS client certificate file (PEM encoded data) + --client_key=KEY-FILE.pem TLS client private key file (PEM encoded data) + --client_passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see downloader doc) + -i, --password_interactive Enter password interactively + -I, --passphrase_interactive Enter OpenPGP key passphrase interactively --insecure Do not check TLS certificates from provider -c, --config=TOML-FILE Path to config TOML file --version Display version of the binary diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 94cf867..dfa0c9a 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -10,8 +10,8 @@ csaf_validator [OPTIONS] files... 
Application Options: --version Display version of the binary --validator=URL URL to validate documents remotely - --validatorcache=FILE FILE to cache remote validations - --validatorpreset= One or more presets to validate remotely (default: mandatory) + --validator_cache=FILE FILE to cache remote validations + --validator_preset= One or more presets to validate remotely (default: mandatory) -o AMOUNT, --output=AMOUNT If a remote validator was used, display the results in JSON format AMOUNT: diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index 9cf2bf5..ae1723d 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -10,7 +10,7 @@ insecure = true #interim_years = #passphrase = #write_indices = false -#timerange = +#time_range = # specification requires at least two providers (default), # to override for testing, enable: @@ -32,7 +32,7 @@ insecure = true create_service_document = true # rate = 1.5 # insecure = true -# timerange = +# time_range = [[providers]] name = "local-dev-provider2" @@ -54,4 +54,4 @@ insecure = true # If aggregator.category == "aggreator", set for an entry that should # be listed in addition: category = "lister" -# ignorepattern = [".*white.*", ".*red.*"] +# ignore_pattern = [".*white.*", ".*red.*"] diff --git a/docs/scripts/testChecker.sh b/docs/scripts/testChecker.sh index 37c128b..8c680d4 100755 --- a/docs/scripts/testChecker.sh +++ b/docs/scripts/testChecker.sh @@ -14,8 +14,8 @@ echo '==== run checker (twice)' cd ~/csaf_distribution ./bin-linux-amd64/csaf_checker -f html -o ../checker-results.html --insecure \ - --client-cert ~/devca1/testclient1.crt \ - --client-key ~/devca1/testclient1-key.pem \ + --client_cert ~/devca1/testclient1.crt \ + --client_key ~/devca1/testclient1-key.pem \ --verbose --insecure localhost cat ../checker-results.html diff --git a/docs/scripts/uploadToProvider.sh b/docs/scripts/uploadToProvider.sh index 8353364..e3aac28 100755 --- a/docs/scripts/uploadToProvider.sh +++ b/docs/scripts/uploadToProvider.sh @@ -23,7 +23,7 @@ for f in $(ls csaf_examples); do ../../bin-linux-amd64/csaf_uploader --insecure -P security123 -a upload \ -t ${TLPs[$((COUNTER++ % 4))]} \ -u https://localhost:8443/cgi-bin/csaf_provider.go \ - --client-cert ~/devca1/testclient1.crt \ - --client-key ~/devca1/testclient1-key.pem \ + --client_cert ~/devca1/testclient1.crt \ + --client_key ~/devca1/testclient1-key.pem \ ./csaf_examples/"$f" done From aa3604ac3d7a9af3b4d93693185d4deafd1ec822 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Tue, 7 Nov 2023 09:46:27 +0100 Subject: [PATCH 040/235] API examples: Improved wording in examples/README.md (#499) * Improved wording in examples/README.md * Improve wording * Fix link purl_searcher -> purls_searcher --------- Co-authored-by: JanHoefelmeyer --- examples/README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/examples/README.md b/examples/README.md index 65c858f..d6286fb 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,5 +1,6 @@ # API examples -A currenty very short list how to use `github.com/csaf-poc/csaf_distribution` as a library: +A currenty very short list how to use `github.com/csaf-poc/csaf_distribution` as a library. +As the API is currently WIP these are are likely to be modified or extented. -* (purl_searcher)[./purl_searcher/main.go] is a tool to search through local advisories. It finds PURLs based on the product ID of an advisory. 
+* [purls_searcher](./purls_searcher/main.go) is a tool to search for PURLs in local advisories by given product IDs. From 21ec5ad8e1a6c993ce8a6e90ec39ad4dfbcef151 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 8 Nov 2023 09:36:20 +0100 Subject: [PATCH 041/235] docs: move link to final CSAF 2.0 in README (#501) --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 1e599a5..782c112 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,9 @@ # csaf_distribution -An implementation of a [CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csd02/csaf-v2.0-csd02.html) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. +An implementation of a +[CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html) +trusted provider, checker, aggregator and downloader. +Includes an uploader command line tool for the trusted provider. ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) From 1579065453c2838035e1a782c024f2884d7743ca Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 8 Nov 2023 09:39:02 +0100 Subject: [PATCH 042/235] docs: be more consistent with names --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 6377488..dcb5df6 100644 --- a/README.md +++ b/README.md @@ -52,14 +52,14 @@ Download the binaries from the most recent release assets on Github. - Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` - Build Go components Makefile supplies the following targets: - - Build For GNU/Linux System: `make build_linux` - - Build For Windows System (cross build): `make build_win` - - Build For macOS on Intel Processor (AMD64) (cross build): `make build_mac_amd64` - - Build For macOS on Apple Silicon (ARM64) (cross build): `make build_mac_arm64` - - Build For Linux, Mac and Windows: `make build` - - Build from a specific GitHub tag by passing the intended tag to the `BUILDTAG` variable. + - Build for GNU/Linux system: `make build_linux` + - Build for Windows system (cross build): `make build_win` + - Build for macOS system on Intel Processor (AMD64) (cross build): `make build_mac_amd64` + - Build for macOS system on Apple Silicon (ARM64) (cross build): `make build_mac_arm64` + - Build For GNU/Linux, macOS and Windows: `make build` + - Build from a specific git tag by passing the intended tag to the `BUILDTAG` variable. E.g. `make BUILDTAG=v1.0.0 build` or `make BUILDTAG=1 build_linux`. - The special value `1` means checking out the highest github tag for the build. + The special value `1` means checking out the highest git tag for the build. - Remove the generated binaries und their directories: `make mostlyclean` Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-windows-amd64/`. From 65fae93a812fdbae93f975dcfa61e031d131a419 Mon Sep 17 00:00:00 2001 From: "Bernhard E. 
Reiter" Date: Wed, 8 Nov 2023 10:40:23 +0100 Subject: [PATCH 043/235] docs: underline that we are _not_ offering an API yet (#502) * docs: move link to final CSAF 2.0 in README * docs: underline that we are _not_ offering an API yet * Grammar fix --------- Co-authored-by: JanHoefelmeyer --- README.md | 6 ++++-- examples/README.md | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f2690b9..022b149 100644 --- a/README.md +++ b/README.md @@ -31,8 +31,10 @@ is a CSAF Aggregator, to list or mirror providers. ## Other stuff ### [examples](./examples/README.md) -are small examples of how to use `github.com/csaf-poc/csaf_distribution` as an API. -Currently this is a work in progress. They may be extended and/or changed in the future. +are small examples of how to use `github.com/csaf-poc/csaf_distribution` +as an API. Currently this is a work in progress, as usage of this repository +as a library to access is _not officially supported_, e.g. +see https://github.com/csaf-poc/csaf_distribution/issues/367 . ## Setup Binaries for the server side are only available and tested diff --git a/examples/README.md b/examples/README.md index d6286fb..a70ea09 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,6 +1,8 @@ # API examples -A currenty very short list how to use `github.com/csaf-poc/csaf_distribution` as a library. -As the API is currently WIP these are are likely to be modified or extented. +An experimental example of how to use `github.com/csaf-poc/csaf_distribution` +as a library. +As usage of the repository as an API is currently a _work in progress_, +these examples are likely to be changed. * [purls_searcher](./purls_searcher/main.go) is a tool to search for PURLs in local advisories by given product IDs. From e2ab1903e745f3b6642ff97f22457d87f3b4aeb3 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Sun, 12 Nov 2023 10:17:28 +0100 Subject: [PATCH 044/235] Support legacy security.txt location as fallback. --- csaf/providermetaloader.go | 100 ++++++++++++++++++++----------------- 1 file changed, 53 insertions(+), 47 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 4e4eb49..62e8876 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -132,8 +132,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } // Next load the PMDs from security.txt - secURL := "https://" + domain + "/.well-known/security.txt" - secResults := pmdl.loadFromSecurity(secURL) + secResults := pmdl.loadFromSecurity(domain) // Filter out the results which are valid. var secGoods []*LoadedProviderMetadata @@ -199,56 +198,63 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } // loadFromSecurity loads the PMDs mentioned in the security.txt. -func (pmdl *ProviderMetadataLoader) loadFromSecurity(path string) []*LoadedProviderMetadata { +func (pmdl *ProviderMetadataLoader) loadFromSecurity(domain string) []*LoadedProviderMetadata { - res, err := pmdl.client.Get(path) - if err != nil { - pmdl.messages.Add( - HTTPFailed, - fmt.Sprintf("Fetching %q failed: %v", path, err)) - return nil - } - if res.StatusCode != http.StatusOK { - pmdl.messages.Add( - HTTPFailed, - fmt.Sprintf("Fetching %q failed: %s (%d)", path, res.Status, res.StatusCode)) - return nil - } - - // Extract all potential URLs from CSAF. 
- urls, err := func() ([]string, error) { - defer res.Body.Close() - return ExtractProviderURL(res.Body, true) - }() - - if err != nil { - pmdl.messages.Add( - HTTPFailed, - fmt.Sprintf("Loading %q failed: %v", path, err)) - return nil - } - - var loaded []*LoadedProviderMetadata - - // Load the URLs -nextURL: - for _, url := range urls { - lpmd := pmdl.loadFromURL(url) - // If loading failed note it down. - if !lpmd.Valid() { - pmdl.messages.AppendUnique(lpmd.Messages) + // If .well-known fails try legacy location. + for _, path := range []string{ + "https://" + domain + "/.well-known/security.txt", + "https://" + domain + "/security.txt", + } { + res, err := pmdl.client.Get(path) + if err != nil { + pmdl.messages.Add( + HTTPFailed, + fmt.Sprintf("Fetching %q failed: %v", path, err)) continue } - // Check for duplicates - for _, l := range loaded { - if l == lpmd { - continue nextURL - } + if res.StatusCode != http.StatusOK { + pmdl.messages.Add( + HTTPFailed, + fmt.Sprintf("Fetching %q failed: %s (%d)", path, res.Status, res.StatusCode)) + continue } - loaded = append(loaded, lpmd) - } - return loaded + // Extract all potential URLs from CSAF. + urls, err := func() ([]string, error) { + defer res.Body.Close() + return ExtractProviderURL(res.Body, true) + }() + + if err != nil { + pmdl.messages.Add( + HTTPFailed, + fmt.Sprintf("Loading %q failed: %v", path, err)) + continue + } + + var loaded []*LoadedProviderMetadata + + // Load the URLs + nextURL: + for _, url := range urls { + lpmd := pmdl.loadFromURL(url) + // If loading failed note it down. + if !lpmd.Valid() { + pmdl.messages.AppendUnique(lpmd.Messages) + continue + } + // Check for duplicates + for _, l := range loaded { + if l == lpmd { + continue nextURL + } + } + loaded = append(loaded, lpmd) + } + + return loaded + } + return nil } // loadFromURL loads a provider metadata from a given URL. From 0a2b69bd5510ec7b4f6d9489a9b1b1590dd71226 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 13 Nov 2023 09:59:12 +0100 Subject: [PATCH 045/235] Adjust checker, too. --- cmd/csaf_checker/processor.go | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 8eb6404..39bd141 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1263,9 +1263,26 @@ func (p *processor) checkProviderMetadata(domain string) bool { // the value of this field. Returns an empty string if no error was encountered, // the errormessage otherwise. func (p *processor) checkSecurity(domain string) string { + var msgs []string + // Try well-known first and fall back to legacy when it fails. + for _, folder := range []string{ + "https://" + domain + "/.well-known/", + "https://" + domain + "/", + } { + msg := p.checkSecurityFolder(folder) + if msg == "" { + break + } + msgs = append(msgs, msg) + } + return strings.Join(msgs, "; ") +} + +// checkSecurityFolder checks the security.txt in a given folder. 
+func (p *processor) checkSecurityFolder(folder string) string { client := p.httpClient() - path := "https://" + domain + "/.well-known/security.txt" + path := folder + "security.txt" res, err := client.Get(path) if err != nil { return fmt.Sprintf("Fetching %s failed: %v", path, err) @@ -1298,7 +1315,7 @@ func (p *processor) checkSecurity(domain string) string { return fmt.Sprintf("CSAF URL '%s' invalid: %v", u, err) } - base, err := url.Parse("https://" + domain + "/.well-known/") + base, err := url.Parse(folder) if err != nil { return err.Error() } From e27d64e42c09445d0037a2c44de51e05bb7e6a11 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 14 Nov 2023 07:55:53 +0100 Subject: [PATCH 046/235] Add path of offending security.txt to error message since now multiple paths are checked --- cmd/csaf_checker/processor.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 39bd141..f32c618 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1273,7 +1273,9 @@ func (p *processor) checkSecurity(domain string) string { if msg == "" { break } - msgs = append(msgs, msg) + // Show which security.txt caused this message + lmsg := folder + "security.txt:" + msg + msgs = append(msgs, lmsg) } return strings.Join(msgs, "; ") } From a4138526277f4e1dbed5603673cd00fb8017685d Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Mon, 20 Nov 2023 11:05:57 +0100 Subject: [PATCH 047/235] Downloader: Only add tlp label to path if no custom directory is configured. Refactor accordingly --- cmd/csaf_downloader/downloader.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index d40f31b..933f2a2 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -631,16 +631,16 @@ nextAdvisory: // Advisories that failed validation are store in a special folder. var newDir string if valStatus != validValidationStatus { - newDir = path.Join(d.cfg.Directory, failedValidationDir, lower) + newDir = path.Join(d.cfg.Directory, failedValidationDir) } else { - newDir = path.Join(d.cfg.Directory, lower) + newDir = d.cfg.Directory } // Do we have a configured destination folder? if d.cfg.Folder != "" { newDir = path.Join(newDir, d.cfg.Folder) } else { - newDir = path.Join(newDir, strconv.Itoa(initialReleaseDate.Year())) + newDir = path.Join(newDir, lower, strconv.Itoa(initialReleaseDate.Year())) } if newDir != lastDir { From 6f8870154c38e00e261d1e8441156f92e0e247e2 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 20 Nov 2023 21:13:24 +0100 Subject: [PATCH 048/235] Break overly long line. Fix typo in comment. 
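The preceding downloader patch, together with the small cleanup that follows, changes how the destination directory for a downloaded advisory is derived: advisories that failed validation land in a dedicated sub-folder, a configured destination folder takes precedence over the default layout, and otherwise the lower-cased TLP label and the initial release year are appended. A minimal, self-contained sketch of that decision is given below; the helper name destinationDir and the example values are illustrative only, the actual logic is the one shown in the cmd/csaf_downloader/downloader.go diffs.

package main

import (
	"fmt"
	"path"
	"strconv"
	"time"
)

// destinationDir restates the decision from the downloader diff:
// advisories that failed validation go into a dedicated sub-folder,
// a configured destination folder overrides the default layout, and
// otherwise the lower-cased TLP label plus the initial release year
// are appended. All names and values here are illustrative.
func destinationDir(base, failedDir, configuredFolder, tlpLower string, valid bool, initialRelease time.Time) string {
	dir := base
	if !valid {
		dir = path.Join(base, failedDir)
	}
	if configuredFolder != "" {
		return path.Join(dir, configuredFolder)
	}
	return path.Join(dir, tlpLower, strconv.Itoa(initialRelease.UTC().Year()))
}

func main() {
	rel := time.Date(2023, time.November, 20, 0, 0, 0, 0, time.UTC)
	fmt.Println(destinationDir("advisories", "failed_validation", "", "white", true, rel))          // advisories/white/2023
	fmt.Println(destinationDir("advisories", "failed_validation", "vendor-x", "white", true, rel))  // advisories/vendor-x
	fmt.Println(destinationDir("advisories", "failed_validation", "", "amber", false, rel))         // advisories/failed_validation/amber/2023
}

With a configured destination folder everything is flattened into one place; the default keeps advisories grouped by TLP label and year, while invalid ones stay clearly separated.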
--- cmd/csaf_downloader/downloader.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 933f2a2..1799a84 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -621,14 +621,16 @@ nextAdvisory: continue } - if err := d.eval.Extract(`$.document.tracking.initial_release_date`, dateExtract, false, doc); err != nil { + if err := d.eval.Extract( + `$.document.tracking.initial_release_date`, dateExtract, false, doc, + ); err != nil { slog.Warn("Cannot extract initial_release_date from advisory", "url", file.URL()) initialReleaseDate = time.Now() } initialReleaseDate = initialReleaseDate.UTC() - // Advisories that failed validation are store in a special folder. + // Advisories that failed validation are stored in a special folder. var newDir string if valStatus != validValidationStatus { newDir = path.Join(d.cfg.Directory, failedValidationDir) From 9e4a519fff9b03ee19c75f925818c96ee4c1b746 Mon Sep 17 00:00:00 2001 From: tschmidtb51 <65305130+tschmidtb51@users.noreply.github.com> Date: Mon, 20 Nov 2023 21:42:47 +0100 Subject: [PATCH 049/235] Add GH Action execution on PRs (#510) --- .github/workflows/go.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index cff9240..1f277f9 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -4,6 +4,9 @@ on: push: paths: - "**.go" + pull_request: + paths: + - "**.go" jobs: build: From 3935d9aa7ae13b693a62c8221b88a89892eb8168 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 20 Nov 2023 21:53:51 +0100 Subject: [PATCH 050/235] Update cmd/csaf_checker/processor.go Co-authored-by: tschmidtb51 <65305130+tschmidtb51@users.noreply.github.com> --- cmd/csaf_checker/processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index f32c618..304d68f 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1274,7 +1274,7 @@ func (p *processor) checkSecurity(domain string) string { break } // Show which security.txt caused this message - lmsg := folder + "security.txt:" + msg + lmsg := folder + "security.txt: " + msg msgs = append(msgs, lmsg) } return strings.Join(msgs, "; ") From 318c898a83e2f261b4c66961404110e45c3508d3 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 21 Nov 2023 12:09:37 +0100 Subject: [PATCH 051/235] Change: cmd/csaf_checker/processor.go: Seperate check of security.txt under .well-known and legacy location into different messages to improve readability --- cmd/csaf_checker/processor.go | 55 +++++++++++++++++++++++------------ 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 304d68f..ac9cca4 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1262,22 +1262,20 @@ func (p *processor) checkProviderMetadata(domain string) bool { // It checks the existence of the CSAF field in the file content and tries to fetch // the value of this field. Returns an empty string if no error was encountered, // the errormessage otherwise. -func (p *processor) checkSecurity(domain string) string { - var msgs []string - // Try well-known first and fall back to legacy when it fails. 
- for _, folder := range []string{ - "https://" + domain + "/.well-known/", - "https://" + domain + "/", - } { - msg := p.checkSecurityFolder(folder) - if msg == "" { - break - } - // Show which security.txt caused this message - lmsg := folder + "security.txt: " + msg - msgs = append(msgs, lmsg) +func (p *processor) checkSecurity(domain string, legacy bool) (int, string) { + folder := "https://" + domain + "/" + if !legacy { + folder = folder + ".well-known/" } - return strings.Join(msgs, "; ") + msg := p.checkSecurityFolder(folder) + if msg == "" { + if !legacy { + return 0, "Found valid security.txt within the well-known directory" + } else { + return 2, "Found valid security.txt in the legacy location" + } + } + return 1, folder + "security.txt: " + msg } // checkSecurityFolder checks the security.txt in a given folder. @@ -1410,7 +1408,13 @@ func (p *processor) checkWellknown(domain string) string { func (p *processor) checkWellknownSecurityDNS(domain string) error { warningsW := p.checkWellknown(domain) - warningsS := p.checkSecurity(domain) + // Security check for well known (default) and legacy location + warningsS, sDMessage := p.checkSecurity(domain, false) + // if the security.txt under .well-known was not okay + sLMessage := "" + if warningsS == 1 { + warningsS, sLMessage = p.checkSecurity(domain, true) + } warningsD := p.checkDNS(domain) p.badWellknownMetadata.use() @@ -1418,17 +1422,30 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badDNSPath.use() var kind MessageType - if warningsS == "" || warningsD == "" || warningsW == "" { + if warningsS != 1 || warningsD == "" || warningsW == "" { kind = WarnType } else { kind = ErrorType } + // Info, Warning or Error depending on kind and warningS + kindSD := kind + if warningsS == 0 { + kindSD = InfoType + } + kindSL := kind + if warningsS == 2 { + kindSL = InfoType + } + if warningsW != "" { p.badWellknownMetadata.add(kind, warningsW) } - if warningsS != "" { - p.badSecurity.add(kind, warningsS) + p.badSecurity.add(kindSD, sDMessage) + // only if the well-known security.txt was not successful: + // report about the legacy location + if warningsS != 0 { + p.badSecurity.add(kindSL, sLMessage) } if warningsD != "" { p.badDNSPath.add(kind, warningsD) From 4a9f8a6f031240cd50b12a4fbff4526cfa9dc792 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 21 Nov 2023 12:14:45 +0100 Subject: [PATCH 052/235] Change: cmd/csaf_checker/processor.go: Improve comment --- cmd/csaf_checker/processor.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index ac9cca4..2e61ae8 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1411,6 +1411,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { // Security check for well known (default) and legacy location warningsS, sDMessage := p.checkSecurity(domain, false) // if the security.txt under .well-known was not okay + // check for a security.txt within its legacy location sLMessage := "" if warningsS == 1 { warningsS, sLMessage = p.checkSecurity(domain, true) From fb7c77b419099c8c215f1d0c432b6417b41dbcc4 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 21 Nov 2023 13:45:46 +0100 Subject: [PATCH 053/235] Remove unnecessary else block --- cmd/csaf_checker/processor.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 2e61ae8..2a5161c 100644 --- a/cmd/csaf_checker/processor.go 
+++ b/cmd/csaf_checker/processor.go @@ -1271,9 +1271,8 @@ func (p *processor) checkSecurity(domain string, legacy bool) (int, string) { if msg == "" { if !legacy { return 0, "Found valid security.txt within the well-known directory" - } else { - return 2, "Found valid security.txt in the legacy location" } + return 2, "Found valid security.txt in the legacy location" } return 1, folder + "security.txt: " + msg } From a6bf44f7cced151123b1b47111d958276e1a40c7 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 22 Nov 2023 08:17:05 +0100 Subject: [PATCH 054/235] Removed impossible to achieve condition in reporters --- cmd/csaf_checker/reporters.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 51731e1..c707a14 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -251,10 +251,6 @@ func (r *securityReporter) report(p *processor, domain *Domain) { req.message(WarnType, "Performed no in-depth test of security.txt.") return } - if len(p.badSecurity) == 0 { - req.message(InfoType, "Found CSAF entry in security.txt.") - return - } req.Messages = p.badSecurity } From 91ab7f6b1cbc0160457dcd92cfe4ae62997a7cde Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Tue, 28 Nov 2023 10:37:16 +0100 Subject: [PATCH 055/235] Chance supported minimal Go version back to 1.20 (#514) --- README.md | 2 +- cmd/csaf_downloader/config.go | 3 ++- cmd/csaf_downloader/downloader.go | 3 ++- cmd/csaf_downloader/forwarder.go | 8 ++++++-- cmd/csaf_downloader/forwarder_test.go | 3 ++- cmd/csaf_downloader/main.go | 3 ++- cmd/csaf_downloader/stats.go | 2 +- cmd/csaf_downloader/stats_test.go | 3 ++- docs/Development.md | 5 +++++ examples/purls_searcher/main.go | 7 +++++-- go.mod | 5 +++-- go.sum | 21 +++++---------------- internal/options/log.go | 3 ++- internal/options/log_test.go | 3 ++- 14 files changed, 40 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index 022b149..54daf87 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.21+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.20+) should be installed. 
[Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 1761d75..367780f 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -13,12 +13,13 @@ import ( "fmt" "io" "log" - "log/slog" "net/http" "os" "path/filepath" "time" + "golang.org/x/exp/slog" + "github.com/csaf-poc/csaf_distribution/v3/internal/certs" "github.com/csaf-poc/csaf_distribution/v3/internal/filter" "github.com/csaf-poc/csaf_distribution/v3/internal/models" diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 1799a84..7fa0c7c 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -19,7 +19,6 @@ import ( "fmt" "hash" "io" - "log/slog" "net/http" "net/url" "os" @@ -30,6 +29,8 @@ import ( "sync" "time" + "golang.org/x/exp/slog" + "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 8ceb0e5..eda6595 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -12,13 +12,14 @@ import ( "bytes" "crypto/tls" "io" - "log/slog" "mime/multipart" "net/http" "os" "path/filepath" "strings" + "golang.org/x/exp/slog" + "github.com/csaf-poc/csaf_distribution/v3/internal/misc" "github.com/csaf-poc/csaf_distribution/v3/util" ) @@ -57,7 +58,10 @@ type forwarder struct { // newForwarder creates a new forwarder. func newForwarder(cfg *config) *forwarder { - queue := max(1, cfg.ForwardQueue) + queue := cfg.ForwardQueue + if queue < 1 { + queue = 1 + } return &forwarder{ cfg: cfg, cmds: make(chan func(*forwarder), queue), diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index c7f8634..dc515ad 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -14,7 +14,6 @@ import ( "encoding/json" "errors" "io" - "log/slog" "mime" "mime/multipart" "net/http" @@ -23,6 +22,8 @@ import ( "strings" "testing" + "golang.org/x/exp/slog" + "github.com/csaf-poc/csaf_distribution/v3/internal/options" "github.com/csaf-poc/csaf_distribution/v3/util" ) diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index 7c0e564..daff163 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -11,10 +11,11 @@ package main import ( "context" - "log/slog" "os" "os/signal" + "golang.org/x/exp/slog" + "github.com/csaf-poc/csaf_distribution/v3/internal/options" ) diff --git a/cmd/csaf_downloader/stats.go b/cmd/csaf_downloader/stats.go index 6bafbf7..237420a 100644 --- a/cmd/csaf_downloader/stats.go +++ b/cmd/csaf_downloader/stats.go @@ -8,7 +8,7 @@ package main -import "log/slog" +import "golang.org/x/exp/slog" // stats contains counters of the downloads. 
type stats struct { diff --git a/cmd/csaf_downloader/stats_test.go b/cmd/csaf_downloader/stats_test.go index 69567ab..b3ab914 100644 --- a/cmd/csaf_downloader/stats_test.go +++ b/cmd/csaf_downloader/stats_test.go @@ -11,8 +11,9 @@ package main import ( "bytes" "encoding/json" - "log/slog" "testing" + + "golang.org/x/exp/slog" ) func TestStatsAdd(t *testing.T) { diff --git a/docs/Development.md b/docs/Development.md index 0a7004a..218cb7e 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -1,5 +1,10 @@ # Development +## Supported Go versions + +We support the latest version and the one before +the latest version of Go (currently 1.21 and 1.20). + ## Generated files Some source code files are machine generated. At the moment these are only diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index b01e671..a91470b 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -7,9 +7,10 @@ import ( "fmt" "log" "os" - "slices" "strings" + "golang.org/x/exp/slices" + "github.com/csaf-poc/csaf_distribution/v3/csaf" ) @@ -70,7 +71,9 @@ func newURLFinder(ids []string) *urlFinder { // clear resets the url finder after a run on an advisory. func (uf *urlFinder) clear() { - clear(uf.urls) + for i := range uf.urls { + uf.urls[i] = uf.urls[i][:0] + } } // dumpURLs dumps the found URLs to stdout. diff --git a/go.mod b/go.mod index 82a20c5..469c8a3 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/csaf-poc/csaf_distribution/v3 -go 1.21 +go 1.20 require ( github.com/BurntSushi/toml v1.3.2 @@ -14,6 +14,7 @@ require ( github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 go.etcd.io/bbolt v1.3.8 golang.org/x/crypto v0.14.0 + golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa golang.org/x/term v0.13.0 golang.org/x/time v0.3.0 ) @@ -26,6 +27,6 @@ require ( github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.3.1 // indirect golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.13.0 // indirect + golang.org/x/sys v0.14.0 // indirect golang.org/x/text v0.13.0 // indirect ) diff --git a/go.sum b/go.sum index c2643b9..3a101d4 100644 --- a/go.sum +++ b/go.sum @@ -9,8 +9,6 @@ github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= -github.com/ProtonMail/gopenpgp/v2 v2.7.3 h1:AJu1OI/1UWVYZl6QcCLKGu9OTngS2r52618uGlje84I= -github.com/ProtonMail/gopenpgp/v2 v2.7.3/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= github.com/ProtonMail/gopenpgp/v2 v2.7.4 h1:Vz/8+HViFFnf2A6XX8JOvZMrA6F5puwNvvF21O1mRlo= github.com/ProtonMail/gopenpgp/v2 v2.7.4/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= @@ -19,7 +17,6 @@ github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEq github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= -github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs= 
github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= github.com/cloudflare/circl v1.3.6 h1:/xbKIqSHbZXHwkhbrhrt2YOHIwYJlXH94E3tI/gDlUg= github.com/cloudflare/circl v1.3.6/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= @@ -28,6 +25,7 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= @@ -44,20 +42,17 @@ github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFR github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ= -go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= -golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= +golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -69,8 +64,6 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= -golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net 
v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -88,18 +81,14 @@ golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= -golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= +golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU= -golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/internal/options/log.go b/internal/options/log.go index c8e828d..226072e 100644 --- a/internal/options/log.go +++ b/internal/options/log.go @@ -9,8 +9,9 @@ package options import ( - "log/slog" "strings" + + "golang.org/x/exp/slog" ) // LogLevel implements a helper type to be used in configurations. diff --git a/internal/options/log_test.go b/internal/options/log_test.go index 6c93865..2272f0f 100644 --- a/internal/options/log_test.go +++ b/internal/options/log_test.go @@ -9,8 +9,9 @@ package options import ( - "log/slog" "testing" + + "golang.org/x/exp/slog" ) func TestMarshalFlag(t *testing.T) { From b457dc872fa4f8031731509ba2ee31e49cba6a3c Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 1 Dec 2023 11:45:09 +0100 Subject: [PATCH 056/235] Remove usage of slices in enum generator. 
(#516) --- csaf/generate_cvss_enums.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index 911b64d..eaa2cb9 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -17,7 +17,7 @@ import ( "go/format" "log" "os" - "slices" + "sort" "strings" "text/template" ) @@ -135,7 +135,7 @@ func main() { defs = append(defs, k) } } - slices.Sort(defs) + sort.Strings(defs) var source bytes.Buffer From 9073a8a282a4efd149acd88aeb23b3d1004cf1c7 Mon Sep 17 00:00:00 2001 From: Juan Ariza Toledano Date: Fri, 1 Dec 2023 15:31:25 +0100 Subject: [PATCH 057/235] feat: Add function to find product identification helpers inspecting the tree (#505) * feat: Add function to find product identification helpers inspecting the tree Signed-off-by: juan131 * fix: simplify unit tests Signed-off-by: juan131 * fix: also iterate over relationships Signed-off-by: juan131 * fix: adapt example to use new library function Signed-off-by: juan131 * Separate collecting and visiting of the product id helpers. --------- Signed-off-by: juan131 Co-authored-by: Sascha L. Teichmann --- csaf/util.go | 61 +++++++++++ csaf/util_test.go | 182 ++++++++++++++++++++++++++++++++ examples/purls_searcher/main.go | 111 +++---------------- 3 files changed, 258 insertions(+), 96 deletions(-) create mode 100644 csaf/util_test.go diff --git a/csaf/util.go b/csaf/util.go index f192f09..f8e34be 100644 --- a/csaf/util.go +++ b/csaf/util.go @@ -36,3 +36,64 @@ func ExtractProviderURL(r io.Reader, all bool) ([]string, error) { } return urls, nil } + +// CollectProductIdentificationHelpers returns a slice of all ProductIdentificationHelper +// for a given ProductID. +func (pt *ProductTree) CollectProductIdentificationHelpers(id ProductID) []*ProductIdentificationHelper { + var helpers []*ProductIdentificationHelper + pt.FindProductIdentificationHelpers( + id, func(helper *ProductIdentificationHelper) { + helpers = append(helpers, helper) + }) + return helpers +} + +// FindProductIdentificationHelpers calls visit on all ProductIdentificationHelper +// for a given ProductID by iterating over all full product names and branches +// recursively available in the ProductTree. 
+func (pt *ProductTree) FindProductIdentificationHelpers( + id ProductID, + visit func(*ProductIdentificationHelper), +) { + // Iterate over all full product names + if fpns := pt.FullProductNames; fpns != nil { + for _, fpn := range *fpns { + if fpn != nil && + fpn.ProductID != nil && *fpn.ProductID == id && + fpn.ProductIdentificationHelper != nil { + visit(fpn.ProductIdentificationHelper) + } + } + } + + // Iterate over branches recursively + var recBranch func(b *Branch) + recBranch = func(b *Branch) { + if b == nil { + return + } + if fpn := b.Product; fpn != nil && + fpn.ProductID != nil && *fpn.ProductID == id && + fpn.ProductIdentificationHelper != nil { + visit(fpn.ProductIdentificationHelper) + } + for _, c := range b.Branches { + recBranch(c) + } + } + for _, b := range pt.Branches { + recBranch(b) + } + + // Iterate over relationships + if rels := pt.RelationShips; rels != nil { + for _, rel := range *rels { + if rel != nil { + if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil && + *fpn.ProductID == id && fpn.ProductIdentificationHelper != nil { + visit(fpn.ProductIdentificationHelper) + } + } + } + } +} diff --git a/csaf/util_test.go b/csaf/util_test.go new file mode 100644 index 0000000..0d5ff49 --- /dev/null +++ b/csaf/util_test.go @@ -0,0 +1,182 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package csaf + +import ( + "reflect" + "testing" +) + +func TestProductTree_FindProductIdentificationHelpers(t *testing.T) { + type fields struct { + Branches Branches + FullProductNames *FullProductNames + RelationShips *Relationships + } + type args struct { + id ProductID + } + tests := []struct { + name string + fields fields + args args + want []*ProductIdentificationHelper + }{ + { + name: "empty product tree", + args: args{ + id: "CSAFPID-0001", + }, + want: nil, + }, + { + name: "product tree with matching full product names", + fields: fields{ + FullProductNames: &FullProductNames{{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }}, + }, + args: args{ + id: "CSAFPID-0001", + }, + want: []*ProductIdentificationHelper{{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }}, + }, + { + name: "product tree with no matching full product names", + fields: fields{ + FullProductNames: &FullProductNames{{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }}, + }, + args: args{ + id: "CSAFPID-0002", + }, + want: nil, + }, + { + name: "product tree with matching branches", + fields: fields{ + Branches: Branches{{ + Name: &[]string{"beta"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + Branches: Branches{{ + Name: &[]string{"beta-2"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: 
&[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0], + }, + }, + }}, + }}, + }, + args: args{ + id: "CSAFPID-0001", + }, + want: []*ProductIdentificationHelper{{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, { + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0], + }}, + }, + { + name: "product tree with no matching branches", + fields: fields{ + Branches: Branches{{ + Name: &[]string{"beta"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + Branches: Branches{{ + Name: &[]string{"beta-2"}[0], + Product: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0], + }, + }, + }}, + }}, + }, + args: args{ + id: "CSAFPID-0002", + }, + want: nil, + }, + { + name: "product tree with matching relationships", + fields: fields{ + RelationShips: &Relationships{{ + FullProductName: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + }}, + }, + args: args{ + id: "CSAFPID-0001", + }, + want: []*ProductIdentificationHelper{{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }}, + }, + { + name: "product tree with no matching relationships", + fields: fields{ + RelationShips: &Relationships{{ + FullProductName: &FullProductName{ + ProductID: &[]ProductID{"CSAFPID-0001"}[0], + ProductIdentificationHelper: &ProductIdentificationHelper{ + CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0], + }, + }, + }}, + }, + args: args{ + id: "CSAFPID-0002", + }, + want: nil, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + pt := &ProductTree{ + Branches: test.fields.Branches, + FullProductNames: test.fields.FullProductNames, + RelationShips: test.fields.RelationShips, + } + if got := pt.CollectProductIdentificationHelpers(test.args.id); !reflect.DeepEqual(got, test.want) { + tt.Errorf("ProductTree.FindProductIdentificationHelpers() = %v, want %v", + got, test.want) + } + }) + } +} diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index a91470b..c1ec3e1 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -9,9 +9,8 @@ import ( "os" "strings" - "golang.org/x/exp/slices" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/csaf-poc/csaf_distribution/v3/util" ) func main() { @@ -35,106 +34,26 @@ func main() { // run prints PURLs belonging to the given Product IDs. 
func run(files []string, ids string) error { - - uf := newURLFinder(strings.Split(ids, ",")) - for _, file := range files { adv, err := csaf.LoadAdvisory(file) if err != nil { return fmt.Errorf("loading %q failed: %w", file, err) } - uf.findURLs(adv) - uf.dumpURLs() - uf.clear() + + for _, id := range strings.Split(ids, ",") { + already := util.Set[csaf.PURL]{} + i := 0 + adv.ProductTree.FindProductIdentificationHelpers( + csaf.ProductID(id), + func(h *csaf.ProductIdentificationHelper) { + if h.PURL != nil && !already.Contains(*h.PURL) { + already.Add(*h.PURL) + i++ + fmt.Printf("%d. %s\n", i, *h.PURL) + } + }) + } } return nil } - -// urlFinder helps to find the URLs of a set of product ids in advisories. -type urlFinder struct { - ids []csaf.ProductID - urls [][]csaf.PURL -} - -// newURLFinder creates a new urlFinder for given ids. -func newURLFinder(ids []string) *urlFinder { - uf := &urlFinder{ - ids: make([]csaf.ProductID, len(ids)), - urls: make([][]csaf.PURL, len(ids)), - } - for i := range uf.ids { - uf.ids[i] = csaf.ProductID(ids[i]) - } - return uf -} - -// clear resets the url finder after a run on an advisory. -func (uf *urlFinder) clear() { - for i := range uf.urls { - uf.urls[i] = uf.urls[i][:0] - } -} - -// dumpURLs dumps the found URLs to stdout. -func (uf *urlFinder) dumpURLs() { - for i, urls := range uf.urls { - if len(urls) == 0 { - continue - } - fmt.Printf("Found URLs for %s:\n", uf.ids[i]) - for j, url := range urls { - fmt.Printf("%d. %s\n", j+1, url) - } - } -} - -// findURLs find the URLs in an advisory. -func (uf *urlFinder) findURLs(adv *csaf.Advisory) { - tree := adv.ProductTree - if tree == nil { - return - } - - // If we have found it and we have a valid URL add unique. - add := func(idx int, h *csaf.ProductIdentificationHelper) { - if idx != -1 && h != nil && h.PURL != nil && - !slices.Contains(uf.urls[idx], *h.PURL) { - uf.urls[idx] = append(uf.urls[idx], *h.PURL) - } - } - - // First iterate over full product names. - if names := tree.FullProductNames; names != nil { - for _, name := range *names { - if name != nil && name.ProductID != nil { - add(slices.Index(uf.ids, *name.ProductID), name.ProductIdentificationHelper) - } - } - } - - // Second traverse the branches recursively. - var recBranch func(*csaf.Branch) - recBranch = func(b *csaf.Branch) { - if p := b.Product; p != nil && p.ProductID != nil { - add(slices.Index(uf.ids, *p.ProductID), p.ProductIdentificationHelper) - } - for _, c := range b.Branches { - recBranch(c) - } - } - for _, b := range tree.Branches { - recBranch(b) - } - - // Third iterate over relationships. - if tree.RelationShips != nil { - for _, rel := range *tree.RelationShips { - if rel != nil { - if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil { - add(slices.Index(uf.ids, *fpn.ProductID), fpn.ProductIdentificationHelper) - } - } - } - } -} From 03e418182d76d36a309912fd6694136f123d9007 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 4 Dec 2023 11:31:14 +0100 Subject: [PATCH 058/235] Advisories: Time filter download by 'updated' field in ROLIE entries. (#519) * Use 'updated' field of ROLIE field entries to time filter downloads. 
* More suited variable naming --- cmd/csaf_checker/processor.go | 8 +------- csaf/advisories.go | 2 +- internal/models/models.go | 8 -------- internal/models/models_test.go | 24 ------------------------ 4 files changed, 2 insertions(+), 40 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 2a5161c..7eaefef 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -33,7 +33,6 @@ import ( "golang.org/x/time/rate" "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" "github.com/csaf-poc/csaf_distribution/v3/util" ) @@ -548,7 +547,7 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { // Filter if we have date checking. if accept := p.cfg.Range; accept != nil { - if pub := time.Time(entry.Published); !pub.IsZero() && !accept.Contains(pub) { + if t := time.Time(entry.Updated); !t.IsZero() && !accept.Contains(t) { return } } @@ -667,11 +666,6 @@ func (p *processor) integrity( var folderYear *int if m := yearFromURL.FindStringSubmatch(u); m != nil { year, _ := strconv.Atoi(m[1]) - // Check if the year is in the accepted time interval. - if accept := p.cfg.Range; accept != nil && - !accept.Intersects(models.Year(year)) { - continue - } folderYear = &year } diff --git a/csaf/advisories.go b/csaf/advisories.go index 9c22ed3..5b85690 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -316,7 +316,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( // Filter if we have date checking. if afp.AgeAccept != nil { - if pub := time.Time(entry.Published); !pub.IsZero() && !afp.AgeAccept(pub) { + if t := time.Time(entry.Updated); !t.IsZero() && !afp.AgeAccept(t) { return } } diff --git a/internal/models/models.go b/internal/models/models.go index 00fead3..520cd9c 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -31,14 +31,6 @@ func NewTimeInterval(a, b time.Time) TimeRange { return TimeRange{a, b} } -// Year returns the time range for a given year. -func Year(year int) TimeRange { - return TimeRange{ - time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC), - time.Date(year, time.December, 31, 23, 59, 59, int(time.Second-time.Nanosecond), time.UTC), - } -} - // guessDate tries to guess an RFC 3339 date time from a given string. func guessDate(s string) (time.Time, bool) { for _, layout := range []string{ diff --git a/internal/models/models_test.go b/internal/models/models_test.go index 0217bf7..a40100f 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -173,27 +173,3 @@ func TestTimeRangeIntersects(t *testing.T) { } } } - -// TestTimeRangeYear checks if the Year construction works. 
-func TestTimeRangeYear(t *testing.T) { - var ( - year = Year(1984) - first = time.Date(1984, time.January, 1, 0, 0, 0, 0, time.UTC) - before = first.Add(-time.Nanosecond) - after = time.Date(1984+1, time.January, 1, 0, 0, 0, 0, time.UTC) - last = after.Add(-time.Nanosecond) - ) - for _, x := range []struct { - t time.Time - expected bool - }{ - {t: first, expected: true}, - {t: before, expected: false}, - {t: last, expected: true}, - {t: after, expected: false}, - } { - if got := year.Contains(x.t); got != x.expected { - t.Fatalf("%v: got %t expected %t", x.t, got, x.expected) - } - } -} From 6c8b3757aacef4e45d6fccf818a4218add03eed6 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Thu, 7 Dec 2023 16:12:26 +0100 Subject: [PATCH 059/235] Older version (#513) * Add go_legacy.yml to check for compatibility with older go versions * Remove tests already done in go.yml * fix: Update actions, use stable/oldstable in actions --------- Co-authored-by: JanHoefelmeyer --- .github/workflows/go.yml | 6 +++--- .github/workflows/go_legacy.yml | 26 ++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/go_legacy.yml diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 1f277f9..95ee8c7 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -12,12 +12,12 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v4 with: - go-version: 1.21.0 + go-version: 'stable' - name: Build run: go build -v ./cmd/... diff --git a/.github/workflows/go_legacy.yml b/.github/workflows/go_legacy.yml new file mode 100644 index 0000000..a86368d --- /dev/null +++ b/.github/workflows/go_legacy.yml @@ -0,0 +1,26 @@ +name: Go + +on: + push: + paths: + - "**.go" + pull_request: + paths: + - "**.go" + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: 'oldstable' + + - name: Build + run: go build -v ./cmd/... + + - name: Tests + run: go test -v ./... From 9a1c66eb8ead1a7075c3ee00bbef3bb97a469883 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 15 Jan 2024 08:59:58 +0100 Subject: [PATCH 060/235] checker: Ensure that the processor is reset before checking each domain. (#523) --- cmd/csaf_checker/processor.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7eaefef..8f3a6c1 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -202,8 +202,8 @@ func (p *processor) close() { } } -// clean clears the fields values of the given processor. -func (p *processor) clean() { +// reset clears the fields values of the given processor. +func (p *processor) reset() { p.redirects = nil p.noneTLS = nil for k := range p.alreadyChecked { @@ -247,6 +247,8 @@ func (p *processor) run(domains []string) (*Report, error) { } for _, d := range domains { + p.reset() + if !p.checkProviderMetadata(d) { // We cannot build a report if the provider metadata cannot be parsed. 
log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) @@ -287,7 +289,6 @@ func (p *processor) run(domains []string) (*Report, error) { domain.Passed = rules.eval(p) report.Domains = append(report.Domains, domain) - p.clean() } return &report, nil From b858640fc173be3b4373694b036c83bd5fcc26a8 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 23 Feb 2024 14:48:39 +0100 Subject: [PATCH 061/235] docs: fix minor typo in test-keys/Readme.md (#525) --- docs/test-keys/Readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/test-keys/Readme.md b/docs/test-keys/Readme.md index 5b422fd..94c8d8f 100644 --- a/docs/test-keys/Readme.md +++ b/docs/test-keys/Readme.md @@ -1,6 +1,6 @@ OpenPGP key-pairs for testing only. -Note: as the keypairs wre fully public, **do not use them for production**. +Note: as the keypairs are fully public, **do not use them for production**. Create your own keypair(s) with the security properties and operational security you need. From 51a681ef3101506ec402e826064bc28f00a94250 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 27 Feb 2024 09:44:41 +0100 Subject: [PATCH 062/235] docs: improve link to CSAF standard documents * Add overview link to csaf.io * Fix link to specification and add link to the latest errata document. --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 54daf87..69601cd 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # csaf_distribution -An implementation of a -[CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html) +An implementation of a [CSAF](https://csaf.io/) +[2.0 Spec](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) +([Errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. From e658738b568ba6c6173325ce4b1081c8142b081c Mon Sep 17 00:00:00 2001 From: Christian Banse Date: Thu, 18 Apr 2024 19:51:25 +0200 Subject: [PATCH 063/235] Added support for structured logging in `csaf_aggretator` This PR adds structured logging for the aggregator service. Currently, only the text handler is used, but I can extend this to use the JSON handler as well. In this case, probably some code that is shared between the aggregator and the downloader would need to be moved to a common package. I was also wondering, whether this repo is moving to Go 1.21 at the future, since `slog` was introduced in to the standard lib in 1.21. So currently, this still relies on the `x/exp` package. 
Fixes #462 --- cmd/csaf_aggregator/config.go | 21 ++++++++++--- cmd/csaf_aggregator/full.go | 38 ++++++++++++++++------- cmd/csaf_aggregator/indices.go | 3 +- cmd/csaf_aggregator/interim.go | 10 +++--- cmd/csaf_aggregator/lazytransaction.go | 5 +-- cmd/csaf_aggregator/main.go | 11 ++++--- cmd/csaf_aggregator/mirror.go | 43 ++++++++++++-------------- cmd/csaf_aggregator/processor.go | 32 +++++++++++-------- csaf/advisories.go | 12 +++++++ go.mod | 2 +- go.sum | 2 ++ internal/options/options.go | 10 ++++++ 12 files changed, 122 insertions(+), 67 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index edb1fd9..2a2bef2 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -12,7 +12,6 @@ import ( "crypto/tls" "errors" "fmt" - "log" "net/http" "os" "runtime" @@ -26,6 +25,7 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/internal/models" "github.com/csaf-poc/csaf_distribution/v3/internal/options" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" "golang.org/x/time/rate" ) @@ -178,9 +178,11 @@ func (p *provider) ageAccept(c *config) func(time.Time) bool { } if c.Verbose { - log.Printf( - "Setting up filter to accept advisories within time range %s to %s\n", - r[0].Format(time.RFC3339), r[1].Format(time.RFC3339)) + slog.Debug( + "Setting up filter to accept advisories within time range", + "from", r[0].Format(time.RFC3339), + "to", r[1].Format(time.RFC3339), + ) } return r.Contains } @@ -393,6 +395,17 @@ func (c *config) setDefaults() { } } +// prepareLogging sets up the structured logging. +func (cfg *config) prepareLogging() error { + ho := slog.HandlerOptions{ + Level: slog.LevelDebug, + } + handler := slog.NewTextHandler(os.Stdout, &ho) + logger := slog.New(handler) + slog.SetDefault(logger) + return nil +} + // compileIgnorePatterns compiles the configured patterns to be ignored. func (p *provider) compileIgnorePatterns() error { pm, err := filter.NewPatternMatcher(p.IgnorePattern) diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 600c650..2165397 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -11,7 +11,6 @@ package main import ( "errors" "fmt" - "log" "os" "path/filepath" "strings" @@ -20,6 +19,7 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) type fullJob struct { @@ -29,11 +29,13 @@ type fullJob struct { err error } -// setupProviderFull fetches the provider-metadate.json for a specific provider. +// setupProviderFull fetches the provider-metadata.json for a specific provider. 
func (w *worker) setupProviderFull(provider *provider) error { - log.Printf("worker #%d: %s (%s)\n", - w.num, provider.Name, provider.Domain) - + w.log.Info("Setting up provider", + "provider", slog.GroupValue( + slog.String("name", provider.Name), + slog.String("domain", provider.Domain), + )) w.dir = "" w.provider = provider @@ -55,7 +57,7 @@ func (w *worker) setupProviderFull(provider *provider) error { "provider-metadata.json has %d validation issues", len(errors)) } - log.Printf("provider-metadata: %s\n", w.loc) + w.log.Info("Using provider-metadata", "url", w.loc) return nil } @@ -79,7 +81,7 @@ func (w *worker) fullWork(wg *sync.WaitGroup, jobs <-chan *fullJob) { func (p *processor) full() error { if p.cfg.runAsMirror() { - log.Println("Running in aggregator mode") + p.log.Info("Running in aggregator mode") // check if we need to setup a remote validator if p.cfg.RemoteValidatorOptions != nil { @@ -96,16 +98,18 @@ func (p *processor) full() error { }() } } else { - log.Println("Running in lister mode") + p.log.Info("Running in lister mode") } queue := make(chan *fullJob) var wg sync.WaitGroup - log.Printf("Starting %d workers.\n", p.cfg.Workers) + p.log.Info("Starting workers...", "num", p.cfg.Workers) + for i := 1; i <= p.cfg.Workers; i++ { wg.Add(1) w := newWorker(i, p) + go w.fullWork(&wg, queue) } @@ -135,12 +139,22 @@ func (p *processor) full() error { for i := range jobs { j := &jobs[i] if j.err != nil { - log.Printf("error: '%s' failed: %v\n", j.provider.Name, j.err) + p.log.Error("Job execution failed", + slog.Group("job", + slog.Group("provider"), + "name", j.provider.Name, + ), + "err", j.err, + ) continue } if j.aggregatorProvider == nil { - log.Printf( - "error: '%s' does not produce any result.\n", j.provider.Name) + p.log.Error("Job did not produce any result", + slog.Group("job", + slog.Group("provider"), + "name", j.provider.Name, + ), + ) continue } diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 69954bd..cc91b45 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -12,7 +12,6 @@ import ( "bufio" "encoding/csv" "fmt" - "log" "os" "path/filepath" "sort" @@ -377,7 +376,7 @@ func (w *worker) writeIndices() error { } for label, summaries := range w.summaries { - log.Printf("%s: %d\n", label, len(summaries)) + w.log.Debug("Writing indices", "label", label, "summaries.num", len(summaries)) if err := w.writeInterims(label, summaries); err != nil { return err } diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index bdd5ebc..cf4a937 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -17,7 +17,6 @@ import ( "errors" "fmt" "io" - "log" "net/http" "os" "path/filepath" @@ -102,12 +101,12 @@ func (w *worker) checkInterims( // XXX: Should we return an error here? for _, e := range errors { - log.Printf("validation error: %s: %v\n", url, e) + w.log.Error("validation error", "url", url, "err", e) } // We need to write the changed content. - // This will start the transcation if not already started. + // This will start the transaction if not already started. dst, err := tx.Dst() if err != nil { return nil, err @@ -159,8 +158,7 @@ func (w *worker) checkInterims( // setupProviderInterim prepares the worker for a specific provider. 
func (w *worker) setupProviderInterim(provider *provider) { - log.Printf("worker #%d: %s (%s)\n", - w.num, provider.Name, provider.Domain) + w.log.Info("Setting up worker", "name", provider.Name, "domain", provider.Domain) w.dir = "" w.provider = provider @@ -262,7 +260,7 @@ func (p *processor) interim() error { queue := make(chan *interimJob) var wg sync.WaitGroup - log.Printf("Starting %d workers.\n", p.cfg.Workers) + p.log.Info("Starting workers...", "num", p.cfg.Workers) for i := 1; i <= p.cfg.Workers; i++ { wg.Add(1) w := newWorker(i, p) diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index a2b1e94..458002f 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -9,11 +9,11 @@ package main import ( - "log" "os" "path/filepath" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) type lazyTransaction struct { @@ -85,7 +85,8 @@ func (lt *lazyTransaction) commit() error { os.RemoveAll(lt.dst) return err } - log.Printf("Move %q -> %q\n", symlink, lt.src) + + slog.Debug("Moving directory", "from", symlink, "to", lt.src) if err := os.Rename(symlink, lt.src); err != nil { os.RemoveAll(lt.dst) return err diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 74a9670..b738a7e 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -15,7 +15,9 @@ import ( "path/filepath" "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gofrs/flock" + "golang.org/x/exp/slog" ) func lock(lockFile *string, fn func() error) error { @@ -44,8 +46,9 @@ func lock(lockFile *string, fn func() error) error { func main() { _, cfg, err := parseArgsConfig() - options.ErrorCheck(err) - options.ErrorCheck(cfg.prepare()) - p := processor{cfg: cfg} - options.ErrorCheck(lock(cfg.LockFile, p.process)) + cfg.prepareLogging() + options.ErrorCheckStructured(err) + options.ErrorCheckStructured(cfg.prepare()) + p := processor{cfg: cfg, log: slog.Default()} + options.ErrorCheckStructured(lock(cfg.LockFile, p.process)) } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 3acb48e..0779a5b 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -16,7 +16,7 @@ import ( "encoding/json" "fmt" "io" - "log" + "log/slog" "net/http" "net/url" "os" @@ -47,7 +47,7 @@ func (w *worker) mirror() (*csaf.AggregatorCSAFProvider, error) { if err != nil && w.dir != "" { // If something goes wrong remove the debris. if err := os.RemoveAll(w.dir); err != nil { - log.Printf("error: %v\n", err) + w.log.Error("Could not remove directory", "path", w.dir, "err", err) } } return result, err @@ -166,7 +166,7 @@ func (w *worker) writeProviderMetadata() error { {Expr: `$.public_openpgp_keys`, Action: util.ReMarshalMatcher(&pm.PGPKeys)}, }, w.metadataProvider); err != nil { // only log the errors - log.Printf("extracting data from orignal provider failed: %v\n", err) + w.log.Error("Extracting data from original provider failed", "err", err) } // We are mirroring the remote public keys, too. 
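For reference, a minimal, self-contained sketch of the logging wiring these hunks converge on: a debug-level text handler on stdout, a processor-wide logger, and per-worker loggers derived via With(). It is not part of the patch; the provider name/domain values and the sample output line are illustrative, and it uses the standard library log/slog, which exposes the same API as the golang.org/x/exp/slog import used in most of these files.

package main

import (
	"log/slog"
	"os"
)

func main() {
	// As in prepareLogging: a debug-level text handler writing to stdout,
	// installed as the process-wide default logger.
	handler := slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})
	slog.SetDefault(slog.New(handler))

	// As in main/newWorker: the processor keeps slog.Default(), and each
	// worker derives its own logger carrying the worker number.
	processorLog := slog.Default()
	workerLog := processorLog.With(slog.Int("worker", 1))

	// Grouped attributes as in setupProviderFull; name and domain are made up.
	workerLog.Info("Setting up provider",
		"provider", slog.GroupValue(
			slog.String("name", "example-provider"),
			slog.String("domain", "example.com"),
		))
	// The text handler renders this roughly as (timestamp shortened):
	// time=... level=INFO msg="Setting up provider" worker=1 provider.name=example-provider provider.domain=example.com
}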
@@ -196,11 +196,11 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { for i := range pm.PGPKeys { pgpKey := &pm.PGPKeys[i] if pgpKey.URL == nil { - log.Printf("ignoring PGP key without URL: %s\n", pgpKey.Fingerprint) + w.log.Warn("Ignoring PGP key without URL", "fingerprint", pgpKey.Fingerprint) continue } if _, err := hex.DecodeString(string(pgpKey.Fingerprint)); err != nil { - log.Printf("ignoring PGP with invalid fingerprint: %s\n", *pgpKey.URL) + w.log.Warn("Ignoring PGP key with invalid fingerprint", "url", *pgpKey.URL) continue } @@ -344,7 +344,7 @@ func (w *worker) doMirrorTransaction() error { // Check if there is a sysmlink already. target := filepath.Join(w.processor.cfg.Folder, w.provider.Name) - log.Printf("target: '%s'\n", target) + w.log.Debug("Checking for path existence", "path", target) exists, err := util.PathExists(target) if err != nil { @@ -359,7 +359,7 @@ func (w *worker) doMirrorTransaction() error { } } - log.Printf("sym link: %s -> %s\n", w.dir, target) + w.log.Debug("Creating symlink", "from", w.dir, "to", target) // Create a new symlink if err := os.Symlink(w.dir, target); err != nil { @@ -368,7 +368,7 @@ func (w *worker) doMirrorTransaction() error { } // Move the symlink - log.Printf("Move: %s -> %s\n", target, webTarget) + w.log.Debug("Moving symlink", "from", target, "to", webTarget) if err := os.Rename(target, webTarget); err != nil { os.RemoveAll(w.dir) return err @@ -499,14 +499,14 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) u, err := url.Parse(file.URL()) if err != nil { - log.Printf("error: %s\n", err) + w.log.Error("Could not parse advisory file URL", "err", err) continue } // Should we ignore this advisory? if w.provider.ignoreURL(file.URL(), w.processor.cfg) { if w.processor.cfg.Verbose { - log.Printf("Ignoring %s: %q\n", w.provider.Name, file.URL()) + w.log.Info("Ignoring advisory", slog.Group("provider", "name", w.provider.Name), "file", file) } continue } @@ -514,7 +514,7 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) // Ignore not conforming filenames. filename := filepath.Base(u.Path) if !util.ConformingFileName(filename) { - log.Printf("Not conforming filename %q. Ignoring.\n", filename) + w.log.Warn("Ignoring advisory because of non-conforming filename", "filename", filename) continue } @@ -531,19 +531,18 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) } if err := downloadJSON(w.client, file.URL(), download); err != nil { - log.Printf("error: %v\n", err) + w.log.Error("Error while downloading JSON", "err", err) continue } // Check against CSAF schema. 
errors, err := csaf.ValidateCSAF(advisory) if err != nil { - log.Printf("error: %s: %v", file, err) + w.log.Error("Error while validating CSAF schema", "err", err) continue } if len(errors) > 0 { - log.Printf("CSAF file %s has %d validation errors.\n", - file, len(errors)) + w.log.Error("CSAF file has validation errors", "num.errors", len(errors), "file", file) continue } @@ -551,29 +550,27 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) if rmv := w.processor.remoteValidator; rmv != nil { rvr, err := rmv.Validate(advisory) if err != nil { - log.Printf("Calling remote validator failed: %s\n", err) + w.log.Error("Calling remote validator failed", "err", err) continue } if !rvr.Valid { - log.Printf( - "CSAF file %s does not validate remotely.\n", file) + w.log.Error("CSAF file does not validate remotely", "file", file.URL()) continue } } sum, err := csaf.NewAdvisorySummary(w.expr, advisory) if err != nil { - log.Printf("error: %s: %v\n", file, err) + w.log.Error("Error while creating new advisory", "file", file, "err", err) continue } if util.CleanFileName(sum.ID) != filename { - log.Printf("ID %q does not match filename %s", - sum.ID, filename) + w.log.Error("ID mismatch", "id", sum.ID, "filename", filename) } if err := w.extractCategories(label, advisory); err != nil { - log.Printf("error: %s: %v\n", file, err) + w.log.Error("Could not extract categories", "file", file, "err", err) continue } @@ -624,7 +621,7 @@ func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error { if err != nil { if err != errNotFound { - log.Printf("error: %s: %v\n", url, err) + w.log.Error("Could not find signature URL", "url", url, "err", err) } // Sign it our self. if sig, err = w.sign(data); err != nil { diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index ccd5062..9a71b90 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -10,14 +10,14 @@ package main import ( "fmt" - "log" "os" "path/filepath" - "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/csaf-poc/csaf_distribution/v3/util" + + "github.com/ProtonMail/gopenpgp/v2/crypto" + "golang.org/x/exp/slog" ) type processor struct { @@ -26,6 +26,9 @@ type processor struct { // remoteValidator is a globally configured remote validator. remoteValidator csaf.RemoteValidator + + // log is the structured logger for the whole processor. + log *slog.Logger } type summary struct { @@ -48,6 +51,7 @@ type worker struct { dir string // Directory to store data to. summaries map[string][]summary // the summaries of the advisories. categories map[string]util.Set[string] // the categories per label. + log *slog.Logger // the structured logger, supplied with the worker number. 
} func newWorker(num int, processor *processor) *worker { @@ -55,6 +59,7 @@ func newWorker(num int, processor *processor) *worker { num: num, processor: processor, expr: util.NewPathEval(), + log: processor.log.With(slog.Int("worker", num)), } } @@ -86,9 +91,10 @@ func (w *worker) locateProviderMetadata(domain string) error { if w.processor.cfg.Verbose { for i := range lpmd.Messages { - log.Printf( - "Loading provider-metadata.json of %q: %s\n", - domain, lpmd.Messages[i].Message) + w.log.Info( + "Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) } } @@ -141,7 +147,7 @@ func (p *processor) removeOrphans() error { fi, err := entry.Info() if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not retrieve file info", "err", err) continue } @@ -153,13 +159,13 @@ func (p *processor) removeOrphans() error { d := filepath.Join(path, entry.Name()) r, err := filepath.EvalSymlinks(d) if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not evaluate symlink", "err", err) continue } fd, err := os.Stat(r) if err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not retrieve file stats", "err", err) continue } @@ -169,18 +175,18 @@ func (p *processor) removeOrphans() error { } // Remove the link. - log.Printf("removing link %s -> %s\n", d, r) + p.log.Info("Removing link", "path", fmt.Sprintf("%s -> %s", d, r)) if err := os.Remove(d); err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not remove symlink", "err", err) continue } // Only remove directories which are in our folder. if rel, err := filepath.Rel(prefix, r); err == nil && rel == filepath.Base(r) { - log.Printf("removing directory %s\n", r) + p.log.Info("Remove directory", "path", r) if err := os.RemoveAll(r); err != nil { - log.Printf("error: %v\n", err) + p.log.Error("Could not remove directory", "err", err) } } } diff --git a/csaf/advisories.go b/csaf/advisories.go index 5b85690..abd55c6 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -13,6 +13,7 @@ import ( "fmt" "io" "log" + "log/slog" "net/http" "net/url" "strings" @@ -23,6 +24,7 @@ import ( // AdvisoryFile constructs the urls of a remote file. type AdvisoryFile interface { + slog.LogValuer URL() string SHA256URL() string SHA512URL() string @@ -46,6 +48,11 @@ func (paf PlainAdvisoryFile) SHA512URL() string { return string(paf) + ".sha512" // SignURL returns the URL of signature file of this advisory. func (paf PlainAdvisoryFile) SignURL() string { return string(paf) + ".asc" } +// LogValue implements [slog.LogValuer] +func (paf PlainAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", paf.URL())) +} + // HashedAdvisoryFile is a more involed version of checkFile. // Here each component can be given explicitly. // If a component is not given it is constructed by @@ -71,6 +78,11 @@ func (haf HashedAdvisoryFile) SHA512URL() string { return haf.name(2, ".sha512") // SignURL returns the URL of signature file of this advisory. func (haf HashedAdvisoryFile) SignURL() string { return haf.name(3, ".asc") } +// LogValue implements [slog.LogValuer] +func (haf HashedAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", haf.URL())) +} + // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. 
type AdvisoryFileProcessor struct { diff --git a/go.mod b/go.mod index 469c8a3..1f6f51d 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 go.etcd.io/bbolt v1.3.8 golang.org/x/crypto v0.14.0 - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa + golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f golang.org/x/term v0.13.0 golang.org/x/time v0.3.0 ) diff --git a/go.sum b/go.sum index 3a101d4..cbbb382 100644 --- a/go.sum +++ b/go.sum @@ -53,6 +53,8 @@ golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= diff --git a/internal/options/options.go b/internal/options/options.go index 961b4b4..ffd699b 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -19,6 +19,7 @@ import ( "github.com/mitchellh/go-homedir" "github.com/csaf-poc/csaf_distribution/v3/util" + "golang.org/x/exp/slog" ) // Parser helps parsing command line arguments and loading @@ -147,3 +148,12 @@ func ErrorCheck(err error) { log.Fatalf("error: %v\n", err) } } + +// ErrorCheck checks if err is not nil and terminates +// the program if so. +func ErrorCheckStructured(err error) { + if err != nil { + slog.Error("Error while executing program", "err", err) + os.Exit(1) + } +} From fb1cf32e17f2dd007efc979c8cbb3fc80786f2e6 Mon Sep 17 00:00:00 2001 From: Christian Banse Date: Fri, 19 Apr 2024 09:35:36 +0200 Subject: [PATCH 064/235] Fixed linting errors --- cmd/csaf_aggregator/config.go | 2 +- internal/options/options.go | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 2a2bef2..f1e602d 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -396,7 +396,7 @@ func (c *config) setDefaults() { } // prepareLogging sets up the structured logging. -func (cfg *config) prepareLogging() error { +func (c *config) prepareLogging() error { ho := slog.HandlerOptions{ Level: slog.LevelDebug, } diff --git a/internal/options/options.go b/internal/options/options.go index ffd699b..d8574ff 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -149,8 +149,9 @@ func ErrorCheck(err error) { } } -// ErrorCheck checks if err is not nil and terminates -// the program if so. +// ErrorCheckStructured checks if err is not nil and terminates the program if +// so. This is similar to [ErrorCheck], but uses [slog] instead of the +// non-structured Go logging. 
func ErrorCheckStructured(err error) { if err != nil { slog.Error("Error while executing program", "err", err) From 39a29e39f1272bee8794413b1372cf3a592fc3c6 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Mon, 22 Apr 2024 13:11:30 +0200 Subject: [PATCH 065/235] Change Licenses from MIT to Apache 2.0 --- LICENSES/Apache-2.0.txt | 73 +++++++++++++++++++++ LICENSES/MIT.txt | 21 ------ Makefile | 6 +- README.md | 12 +++- cmd/csaf_aggregator/client.go | 6 +- cmd/csaf_aggregator/config.go | 6 +- cmd/csaf_aggregator/files.go | 6 +- cmd/csaf_aggregator/full.go | 6 +- cmd/csaf_aggregator/indices.go | 6 +- cmd/csaf_aggregator/interim.go | 6 +- cmd/csaf_aggregator/lazytransaction.go | 6 +- cmd/csaf_aggregator/lister.go | 6 +- cmd/csaf_aggregator/main.go | 6 +- cmd/csaf_aggregator/mirror.go | 6 +- cmd/csaf_aggregator/processor.go | 6 +- cmd/csaf_checker/config.go | 6 +- cmd/csaf_checker/links.go | 6 +- cmd/csaf_checker/links_test.go | 6 +- cmd/csaf_checker/main.go | 6 +- cmd/csaf_checker/processor.go | 6 +- cmd/csaf_checker/report.go | 6 +- cmd/csaf_checker/reporters.go | 6 +- cmd/csaf_checker/roliecheck.go | 6 +- cmd/csaf_checker/rules.go | 6 +- cmd/csaf_downloader/config.go | 6 +- cmd/csaf_downloader/downloader.go | 6 +- cmd/csaf_downloader/forwarder.go | 6 +- cmd/csaf_downloader/forwarder_test.go | 6 +- cmd/csaf_downloader/main.go | 6 +- cmd/csaf_downloader/stats.go | 6 +- cmd/csaf_downloader/stats_test.go | 6 +- cmd/csaf_provider/actions.go | 6 +- cmd/csaf_provider/config.go | 6 +- cmd/csaf_provider/controller.go | 6 +- cmd/csaf_provider/create.go | 6 +- cmd/csaf_provider/files.go | 6 +- cmd/csaf_provider/indices.go | 6 +- cmd/csaf_provider/main.go | 6 +- cmd/csaf_provider/mux.go | 6 +- cmd/csaf_provider/rolie.go | 6 +- cmd/csaf_provider/tmpl/create.html | 6 +- cmd/csaf_provider/tmpl/index.html | 6 +- cmd/csaf_provider/tmpl/upload.html | 6 +- cmd/csaf_provider/transaction.go | 6 +- cmd/csaf_uploader/config.go | 6 +- cmd/csaf_uploader/main.go | 6 +- cmd/csaf_uploader/processor.go | 6 +- cmd/csaf_validator/main.go | 6 +- csaf/advisories.go | 6 +- csaf/advisory.go | 6 +- csaf/cvss20enums.go | 6 +- csaf/cvss3enums.go | 6 +- csaf/doc.go | 6 +- csaf/generate_cvss_enums.go | 12 ++-- csaf/models.go | 6 +- csaf/providermetaloader.go | 6 +- csaf/remotevalidation.go | 6 +- csaf/rolie.go | 6 +- csaf/summary.go | 6 +- csaf/util.go | 6 +- csaf/util_test.go | 6 +- csaf/validation.go | 6 +- docs/scripts/DNSConfigForItest.sh | 6 +- docs/scripts/TLSClientConfigsForITest.sh | 6 +- docs/scripts/TLSConfigsForITest.sh | 6 +- docs/scripts/createCCForITest.sh | 6 +- docs/scripts/createRootCAForITest.sh | 6 +- docs/scripts/createWebserverCertForITest.sh | 6 +- docs/scripts/downloadExamples.sh | 6 +- docs/scripts/setupProviderForITest.sh | 6 +- docs/scripts/setupValidationService.sh | 6 +- docs/scripts/testAggregator.sh | 6 +- docs/scripts/testChecker.sh | 6 +- docs/scripts/testDownloader.sh | 6 +- docs/scripts/uploadToProvider.sh | 6 +- internal/certs/certs.go | 6 +- internal/certs/certs_test.go | 6 +- internal/filter/filter.go | 6 +- internal/filter/filter_test.go | 6 +- internal/misc/doc.go | 6 +- internal/misc/mime.go | 6 +- internal/misc/mime_test.go | 6 +- internal/models/models.go | 6 +- internal/models/models_test.go | 6 +- internal/options/log.go | 6 +- internal/options/log_test.go | 6 +- internal/options/options.go | 6 +- internal/options/options_test.go | 6 +- util/client.go | 6 +- util/csv.go | 6 +- util/doc.go | 6 +- util/file.go | 6 +- util/file_test.go | 6 +- util/hash.go | 6 +- util/json.go | 6 +- 
util/set.go | 6 +- util/url.go | 6 +- util/version.go | 6 +- 98 files changed, 372 insertions(+), 310 deletions(-) create mode 100644 LICENSES/Apache-2.0.txt delete mode 100644 LICENSES/MIT.txt diff --git a/LICENSES/Apache-2.0.txt b/LICENSES/Apache-2.0.txt new file mode 100644 index 0000000..137069b --- /dev/null +++ b/LICENSES/Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSES/MIT.txt b/LICENSES/MIT.txt deleted file mode 100644 index 57165e6..0000000 --- a/LICENSES/MIT.txt +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/Makefile b/Makefile index 19e31c7..b4b3964 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ -# This file is Free Software under the MIT License -# without warranty, see README.md and LICENSES/MIT.txt for details. +# This file is Free Software under the Apache-2.0 License +# without warranty, see README.md and LICENSES/Apache-2.0.txt for details. # -# SPDX-License-Identifier: MIT +# SPDX-License-Identifier: Apache-2.0 # # SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) # Software-Engineering: 2021 Intevation GmbH diff --git a/README.md b/README.md index 69601cd..78342f5 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,13 @@ + + # csaf_distribution An implementation of a [CSAF](https://csaf.io/) @@ -90,7 +100,7 @@ For further details of the development process consult our [development page](./ ## License -- `csaf_distribution` is licensed as Free Software under MIT License. +- `csaf_distribution` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). - See the specific source files for details, the license itself can be found in the directory `LICENSES/`. diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index deb108a..8200d34 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index edb1fd9..711238c 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/files.go b/cmd/csaf_aggregator/files.go index adf04aa..18ccbb6 100644 --- a/cmd/csaf_aggregator/files.go +++ b/cmd/csaf_aggregator/files.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 600c650..fb8e0f9 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 69954bd..598685c 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index bdd5ebc..692841f 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index a2b1e94..16470d3 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/lister.go b/cmd/csaf_aggregator/lister.go index a3bfd29..4d758e4 100644 --- a/cmd/csaf_aggregator/lister.go +++ b/cmd/csaf_aggregator/lister.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 74a9670..d5d04e5 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 3acb48e..32e0cbf 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index ccd5062..fb9acde 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index 3502443..ac9ce62 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index 5784489..0456ace 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index 3229511..8abf4e6 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/main.go b/cmd/csaf_checker/main.go index 73a5cce..752fdf8 100644 --- a/cmd/csaf_checker/main.go +++ b/cmd/csaf_checker/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 8f3a6c1..451a315 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_checker/report.go b/cmd/csaf_checker/report.go index 2b53bb2..9b5251b 100644 --- a/cmd/csaf_checker/report.go +++ b/cmd/csaf_checker/report.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index c707a14..016d371 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 94b1c2f..53d1150 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_checker/rules.go b/cmd/csaf_checker/rules.go index 6981b6b..eadbbb2 100644 --- a/cmd/csaf_checker/rules.go +++ b/cmd/csaf_checker/rules.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 367780f..39a4d05 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 7fa0c7c..38203bf 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022, 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2022, 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index eda6595..13957d5 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index dc515ad..edfa476 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index daff163..9364b88 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_downloader/stats.go b/cmd/csaf_downloader/stats.go index 237420a..94a38de 100644 --- a/cmd/csaf_downloader/stats.go +++ b/cmd/csaf_downloader/stats.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_downloader/stats_test.go b/cmd/csaf_downloader/stats_test.go index b3ab914..79406c7 100644 --- a/cmd/csaf_downloader/stats_test.go +++ b/cmd/csaf_downloader/stats_test.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH diff --git a/cmd/csaf_provider/actions.go b/cmd/csaf_provider/actions.go index 54d4e24..8f385e6 100644 --- a/cmd/csaf_provider/actions.go +++ b/cmd/csaf_provider/actions.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index af99cc1..49a7204 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/controller.go b/cmd/csaf_provider/controller.go index c8680ff..7f64fe2 100644 --- a/cmd/csaf_provider/controller.go +++ b/cmd/csaf_provider/controller.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/create.go b/cmd/csaf_provider/create.go index 8e882a5..56893c6 100644 --- a/cmd/csaf_provider/create.go +++ b/cmd/csaf_provider/create.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/files.go b/cmd/csaf_provider/files.go index 0b3c5ed..39a97e3 100644 --- a/cmd/csaf_provider/files.go +++ b/cmd/csaf_provider/files.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/indices.go b/cmd/csaf_provider/indices.go index a7ecd3b..805371b 100644 --- a/cmd/csaf_provider/indices.go +++ b/cmd/csaf_provider/indices.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 2264676..8740e81 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/mux.go b/cmd/csaf_provider/mux.go index 34b7e2e..021c074 100644 --- a/cmd/csaf_provider/mux.go +++ b/cmd/csaf_provider/mux.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. // -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/rolie.go b/cmd/csaf_provider/rolie.go index ea48480..98448bd 100644 --- a/cmd/csaf_provider/rolie.go +++ b/cmd/csaf_provider/rolie.go @@ -1,7 +1,7 @@ -// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
// -// SPDX-License-Identifier: MIT +// SPDX-License-Identifier: Apache-2.0 // // SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) // Software-Engineering: 2021 Intevation GmbH diff --git a/cmd/csaf_provider/tmpl/create.html b/cmd/csaf_provider/tmpl/create.html index 74fef6d..0b06f6f 100644 --- a/cmd/csaf_provider/tmpl/create.html +++ b/cmd/csaf_provider/tmpl/create.html @@ -1,8 +1,8 @@ diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index b02165b..81a45fa 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -100,22 +100,12 @@ The following example file documents all available configuration options: #tlps = ["csaf", "white", "amber", "green", "red"] # Make the provider create a ROLIE service document. -#create_service_document = true +#create_service_document = false # Make the provider create a ROLIE category document from a list of strings. # If a list item starts with `expr:` # the rest of the string is used as a JsonPath expression # to extract a string from the incoming advisories. -# If the result of the expression is a string this string -# is used. If the result is an array each element of -# this array is tested if it is a string or an array. -# If this test fails the expression fails. If the -# test succeeds the rules are applied recursively to -# collect all strings in the result. -# Suggested expressions are: -# - vendor, product family and product names: "expr:$.product_tree..branches[?(@.category==\"vendor\" || @.category==\"product_family\" || @.category==\"product_name\")].name" -# - CVEs: "expr:$.vulnerabilities[*].cve" -# - CWEs: "expr:$.vulnerabilities[*].cwe.id" # Strings not starting with `expr:` are taken verbatim. # By default no category documents are created. # This example provides an overview over the syntax, From 2f9d5658eb8c34dd782d95b9cd030e348163d30d Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 13 May 2024 11:50:06 +0200 Subject: [PATCH 077/235] docs: remove unused license file (#544) * Remove LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt as the only code using it was already removed with 6b9ecead89c5b40e86928c6e7f416903e0a495e1. --- LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt | 51 ------------------- 1 file changed, 51 deletions(-) delete mode 100644 LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt diff --git a/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt b/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt deleted file mode 100644 index fa1aad8..0000000 --- a/LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt +++ /dev/null @@ -1,51 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. From 7a5347803abc06dffbd106b8544e696d81ac3056 Mon Sep 17 00:00:00 2001 From: Florian von Samson <167841080+fvsamson@users.noreply.github.com> Date: Mon, 13 May 2024 14:36:03 +0200 Subject: [PATCH 078/235] docs: improve README.md's first sentence * Improve the structure of the sentence and the two links. --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 4c02b8f..bc9ae2a 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,9 @@ # csaf_distribution -An implementation of a [CSAF](https://csaf.io/) -[2.0 Spec](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) -([Errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) +Implements a [CSAF](https://csaf.io/) +([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) +and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. 
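The category-document option trimmed from `docs/csaf_provider.md` further above distinguishes verbatim strings from JsonPath expressions prefixed with `expr:`. As a rough illustration of what such an expression yields, the sketch below evaluates the suggested CVE expression with the `PathEval` helper that the util tests in the next patch exercise; the test name and the advisory fragment are invented for the example, only the expression string and the `NewPathEval`/`Eval` API come from this series.

```
// Sketch only: evaluates one of the suggested `expr:` JsonPath expressions
// against a hand-written advisory fragment. It lives in package util so it
// can use PathEval directly, like the tests added in this series.
package util

import "testing"

func TestEvalSuggestedCategoryExpression(t *testing.T) {
	doc := map[string]any{
		"vulnerabilities": []any{
			map[string]any{"cve": "CVE-2023-0001"},
			map[string]any{"cve": "CVE-2023-0002"},
		},
	}
	// The wildcard expression is expected to yield a list of values.
	result, err := NewPathEval().Eval(`$.vulnerabilities[*].cve`, doc)
	if err != nil {
		t.Fatal(err)
	}
	list, ok := result.([]any)
	if !ok {
		t.Fatalf("expected a list, got %T", result)
	}
	// Collect the string elements, mirroring how the provider documentation
	// describes gathering category strings from expression results.
	var cves []string
	for _, v := range list {
		if s, ok := v.(string); ok {
			cves = append(cves, s)
		}
	}
	if len(cves) != 2 {
		t.Fatalf("expected two CVE ids, got %v", cves)
	}
}
```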
From 33bd6bd78786564f56f458618df611e700eeeea3 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 12 Jun 2024 10:08:06 +0200 Subject: [PATCH 079/235] Extend unittest coverage in util --- util/csv_test.go | 40 +++++++++ util/file_test.go | 141 ++++++++++++++++++++++++++++++- util/hash_test.go | 109 ++++++++++++++++++++++++ util/json_test.go | 209 ++++++++++++++++++++++++++++++++++++++++++++++ util/set_test.go | 65 ++++++++++++++ util/url_test.go | 36 ++++++++ 6 files changed, 599 insertions(+), 1 deletion(-) create mode 100644 util/csv_test.go create mode 100644 util/hash_test.go create mode 100644 util/json_test.go create mode 100644 util/set_test.go create mode 100644 util/url_test.go diff --git a/util/csv_test.go b/util/csv_test.go new file mode 100644 index 0000000..a744b75 --- /dev/null +++ b/util/csv_test.go @@ -0,0 +1,40 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "bytes" + "testing" +) + +func TestCSV(t *testing.T) { + buf := new(bytes.Buffer) + csvWriter := NewFullyQuotedCSWWriter(buf) + for _, x := range [][]string{{"a", "b", "c"}, {"d", "e", "f"}} { + err := csvWriter.Write(x) + if err != nil { + t.Error(err) + } + } + + csvWriter.Flush() + err := csvWriter.Error() + if err != nil { + t.Error(err) + } + for _, want := range []string{`"a","b","c"`, `"d","e","f"`} { + got, err := buf.ReadString('\n') + if err != nil { + t.Error(err) + } + if got[:len(got)-1] != want { + t.Errorf("FullyQuotedCSWWriter: Expected %q but got %q.", want, got) + } + } +} diff --git a/util/file_test.go b/util/file_test.go index 3f648b8..320f3d4 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -10,6 +10,8 @@ package util import ( "bytes" + "os" + "path/filepath" "testing" ) @@ -55,8 +57,54 @@ func TestConformingFileName(t *testing.T) { } } -func TestNWriter(t *testing.T) { +func TestIDMatchesFilename(t *testing.T) { + pathEval := NewPathEval() + doc := make(map[string]interface{}) + doc["document"] = map[string]interface{}{ + "tracking": map[string]interface{}{ + "id": "valid.json", + }, + } + + err := IDMatchesFilename(pathEval, doc, "valid.json") + if err != nil { + t.Errorf("IDMatchesFilename: Expected nil, got %q", err) + } + + err = IDMatchesFilename(pathEval, doc, "different_file_name.json") + if err == nil { + t.Error("IDMatchesFilename: Expected error, got nil") + } + + doc["document"] = map[string]interface{}{ + "tracking": map[string]interface{}{}, + } + err = IDMatchesFilename(pathEval, doc, "valid.json") + if err == nil { + t.Error("IDMatchesFilename: Expected error, got nil") + } +} + +func TestPathExists(t *testing.T) { + got, err := PathExists("/this/path/does/not/exist") + if err != nil { + t.Error(err) + } + if got != false { + t.Error("PathExists: Expected false, got true") + } + dir := t.TempDir() + got, err = PathExists(dir) + if err != nil { + t.Error(err) + } + if got != true { + t.Error("PathExists: Expected true, got false") + } +} + +func TestNWriter(t *testing.T) { msg := []byte("Gruß!\n") first, second := msg[:len(msg)/2], msg[len(msg)/2:] @@ -78,3 +126,94 @@ func TestNWriter(t *testing.T) { t.Errorf("Expected %q, but got %q", msg, out) } } + +func TestWriteToFile(t *testing.T) { + filename := filepath.Join(t.TempDir(), "test_file") + wt := 
bytes.NewBufferString("test_data") + err := WriteToFile(filename, wt) + if err != nil { + t.Error(err) + } + fileData, err := os.ReadFile(filename) + if err != nil { + t.Error(err) + } + if !bytes.Equal(fileData, []byte("test_data")) { + t.Errorf("DeepCopy: Expected test_data, got %v", fileData) + } +} + +func TestMakeUniqFile(t *testing.T) { + dir := t.TempDir() + _, file, err := MakeUniqFile(dir) + if err != nil { + t.Error(err) + } + _, err = file.Write([]byte("test_data")) + if err != nil { + t.Error(err) + } + err = file.Close() + if err != nil { + t.Error(err) + } +} + +func Test_mkUniq(t *testing.T) { + dir := t.TempDir() + name, err := mkUniq(dir+"/", func(name string) error { + return nil + }) + if err != nil { + t.Error(err) + } + firstTime := true + name1, err := mkUniq(dir+"/", func(_ string) error { + if firstTime { + firstTime = false + return os.ErrExist + } + return nil + }) + if err != nil { + t.Error(err) + } + if name == name1 { + t.Errorf("mkUniq: Expected unique names, got %v and %v", name, name1) + } +} + +func TestDeepCopy(t *testing.T) { + dir := t.TempDir() + os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755) + os.MkdirAll(filepath.Join(dir, "dst"), 0755) + os.MkdirAll(filepath.Join(dir, "dst1"), 0755) + err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755) + if err != nil { + t.Error(err) + } + + err = DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")) + if err != nil { + t.Error(err) + } + + fileData, err := os.ReadFile(filepath.Join(dir, "dst/folder0/test_file")) + if err != nil { + t.Error(err) + } + + if !bytes.Equal(fileData, []byte("test_data")) { + t.Errorf("DeepCopy: Expected test_data, got %v", fileData) + } + + err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")) + if err == nil { + t.Error("DeepCopy: Expected error, got nil") + } + + err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist") + if err == nil { + t.Error("DeepCopy: Expected error, got nil") + } +} diff --git a/util/hash_test.go b/util/hash_test.go new file mode 100644 index 0000000..ed0f0b2 --- /dev/null +++ b/util/hash_test.go @@ -0,0 +1,109 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "hash" + "os" + "path/filepath" + "reflect" + "strings" + "testing" +) + +func TestHashFromReader(t *testing.T) { + r := strings.NewReader("deadbeef") + want := []byte{0xde, 0xad, 0xbe, 0xef} + if got, err := HashFromReader(r); !reflect.DeepEqual(want, got) { + if err != nil { + t.Error(err) + } + t.Errorf("HashFromReader: Expected %v, got %v", want, got) + } +} + +func TestHashFromFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + testFile, err := os.Create(filePath) + if err != nil { + t.Error(err) + } + + testFile.WriteString("deadbeef") + want := []byte{0xde, 0xad, 0xbe, 0xef} + + testFile.Close() + + if got, err := HashFromFile(filePath); !reflect.DeepEqual(want, got) { + if err != nil { + t.Error(err) + } + t.Errorf("HashFromFile: Expected %v, got %v", want, got) + } +} + +type deadbeefHash struct { + hash.Hash +} + +func (deadbeefHash) Write(p []byte) (int, error) { return len(p), nil } +func (deadbeefHash) Sum(_ []byte) []byte { return []byte{0xde, 0xad, 0xbe, 0xef} } + +func TestWriteHashToFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + + hashArg := deadbeefHash{} + nameArg := "name" + want := "deadbeef " + nameArg + "\n" + + err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}) + if err != nil { + t.Error(err) + } + testFile, err := os.Open(filePath) + if err != nil { + t.Error(err) + } + defer testFile.Close() + fileContent, err := os.ReadFile(filePath) + if err != nil { + t.Error(err) + } + if got := string(fileContent); got != want { + t.Errorf("WriteHashToFile: Expected %v, got %v", want, got) + } +} + +func TestWriteHashSumToFile(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "test_file") + + sum := []byte{0xde, 0xad, 0xbe, 0xef} + nameArg := "name" + want := "deadbeef " + nameArg + "\n" + + err := WriteHashSumToFile(filePath, nameArg, sum) + if err != nil { + t.Error(err) + } + testFile, err := os.Open(filePath) + if err != nil { + t.Error(err) + } + defer testFile.Close() + fileContent, err := os.ReadFile(filePath) + if err != nil { + t.Error(err) + } + if got := string(fileContent); got != want { + t.Errorf("WriteHashSumToFile: Expected %v, got %v", want, got) + } +} diff --git a/util/json_test.go b/util/json_test.go new file mode 100644 index 0000000..452fabe --- /dev/null +++ b/util/json_test.go @@ -0,0 +1,209 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "context" + "reflect" + "testing" + "time" +) + +func TestPathEval_Compile(t *testing.T) { + pathEval := NewPathEval() + eval, err := pathEval.Compile("foo") + if err != nil { + t.Error(err) + } + + // Check caching + eval1, err := pathEval.Compile("foo") + if err != nil { + t.Error(err) + } + if reflect.ValueOf(eval).Pointer() != reflect.ValueOf(eval1).Pointer() { + t.Error("PathEval_Compile: Expected cached eval") + } + + got, err := eval.EvalInt(context.Background(), map[string]interface{}{"foo": 5}) + if err != nil { + t.Error(err) + } + if got != 5 { + t.Errorf("PathEval_Compile: Expected 5, got %v", got) + } +} + +func TestPathEval_Eval(t *testing.T) { + pathEval := NewPathEval() + _, err := pathEval.Eval("foo", nil) + if err == nil { + t.Error("PathEval_Eval: Expected error, got nil") + } + got, err := pathEval.Eval("foo", map[string]interface{}{"foo": 5}) + if err != nil { + t.Error(err) + } + if got != 5 { + t.Errorf("PathEval_Compile: Expected 5, got %v", got) + } +} + +func TestReMarshalMatcher(t *testing.T) { + var intDst int + var uintSrc uint = 2 + remarshalFunc := ReMarshalMatcher(&intDst) + err := remarshalFunc(uintSrc) + if err != nil { + t.Error(err) + } + if intDst != 2 { + t.Errorf("ReMarshalMatcher: Expected %v, got %v", uintSrc, intDst) + } +} + +func TestBoolMatcher(t *testing.T) { + var boolDst bool + boolFunc := BoolMatcher(&boolDst) + err := boolFunc(true) + if err != nil { + t.Error(err) + } + + if boolDst != true { + t.Error("BoolMatcher: Expected true got false") + } + + err = boolFunc(1) + if err == nil { + t.Error("BoolMatcher: Expected error, got nil") + } +} + +func TestStringMatcher(t *testing.T) { + var stringDst string + stringFunc := StringMatcher(&stringDst) + err := stringFunc("test") + if err != nil { + t.Error(err) + } + + if stringDst != "test" { + t.Errorf("StringMatcher: Expected test, got %v", stringDst) + } + + err = stringFunc(1) + if err == nil { + t.Error("StringMatcher: Expected error, got nil") + } +} + +func TestStringTreeMatcher(t *testing.T) { + var stringTreeDst []string + stringTreeFunc := StringTreeMatcher(&stringTreeDst) + err := stringTreeFunc([]any{"a", "a", "b"}) + if err != nil { + t.Error(err) + } + + wantAnySlice := []any{"a", "b"} + if reflect.DeepEqual(stringTreeDst, wantAnySlice) { + t.Errorf("StringTreeMatcher: Expected %v, got %v", wantAnySlice, stringTreeDst) + } + + err = stringTreeFunc([]string{"a", "a", "b"}) + if err == nil { + t.Error("StringTreeMatcher: Expected error, got nil") + } + + err = stringTreeFunc(1) + if err == nil { + t.Error("StringTreeMatcher: Expected error, got nil") + } +} + +func TestTimeMatcher(t *testing.T) { + var timeDst time.Time + timeFunc := TimeMatcher(&timeDst, time.RFC3339) + err := timeFunc("2024-03-18T12:57:48.236Z") + if err != nil { + t.Error(err) + } + wantTime := time.Date(2024, time.March, 18, 12, 57, 48, 236_000_000, time.UTC) + if timeDst != wantTime { + t.Errorf("TimeMatcher: Expected %v, got %v", wantTime, timeDst) + } + + err = timeFunc("") + if err == nil { + t.Error("TimeMatcher: Expected error, got nil") + } + + err = timeFunc(1) + if err == nil { + t.Error("TimeMatcher: Expected error, got nil") + } +} + +func TestPathEval_Extract(t *testing.T) { + pathEval := NewPathEval() + var result string + matcher := StringMatcher(&result) + err := pathEval.Extract("foo", 
matcher, true, map[string]interface{}{"foo": "bar"}) + if err != nil { + t.Error(err) + } + if result != "bar" { + t.Errorf("PathEval_Extract: Expected bar, got %v", result) + } +} + +func TestPathEval_Match(t *testing.T) { + var got string + doc := map[string]interface{}{"foo": "bar"} + + pe := NewPathEval() + pem := PathEvalMatcher{Expr: "foo", Action: StringMatcher(&got)} + + err := pe.Match([]PathEvalMatcher{pem}, doc) + if err != nil { + t.Error(err) + } + if got != "bar" { + t.Errorf("PathEval_Match: Expected bar, got %v", got) + } +} + +func TestPathEval_Strings(t *testing.T) { + pe := NewPathEval() + doc := map[string]interface{}{"foo": "bar"} + want := []string{"bar"} + + got, err := pe.Strings([]string{"foo"}, true, doc) + if err != nil { + t.Error(err) + } + + if !reflect.DeepEqual(got, want) { + t.Errorf("PathEval_Strings: Expected %v, got %v", want, got) + } +} + +func TestAsStrings(t *testing.T) { + arg := []interface{}{"foo", "bar"} + want := []string{"foo", "bar"} + + got, valid := AsStrings(arg) + if !valid { + t.Error("AsStrings: Expected true, got false") + } + if !reflect.DeepEqual(got, want) { + t.Errorf("AsStrings: Expected %v, got %v", want, got) + } +} diff --git a/util/set_test.go b/util/set_test.go new file mode 100644 index 0000000..a28878e --- /dev/null +++ b/util/set_test.go @@ -0,0 +1,65 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "reflect" + "sort" + "testing" +) + +func TestSet(t *testing.T) { + s := Set[int]{} + if s.Contains(0) { + t.Error("Set.Contains: Expected false got true") + } + s.Add(0) + if !s.Contains(0) { + t.Error("Set.Contains: Expected true got false") + } + + s0 := Set[int]{} + s1 := Set[int]{} + + s0.Add(0) + s0.Add(1) + + s1.Add(0) + s1.Add(1) + s1.Add(2) + + diff0 := s0.Difference(s1) + diff1 := s1.Difference(s0) + + if reflect.DeepEqual(diff0, diff1) { + t.Errorf("Set.Difference: %q and %q are different", diff0, diff1) + } + + if s0.ContainsAll(s1) { + t.Error("Set.ContainsAll: Expected false got true") + } + + if !s1.ContainsAll(s0) { + t.Error("Set.ContainsAll: Expected true got false") + } + + s2 := Set[int]{} + s2.Add(0) + s2.Add(1) + s2.Add(2) + s2.Add(3) + + wantKeys := []int{0, 1, 2, 3} + gotKeys := s2.Keys() + sort.Ints(gotKeys) + + if !reflect.DeepEqual(wantKeys, gotKeys) { + t.Errorf("Set.Keys: Expected %q got %q", wantKeys, gotKeys) + } +} diff --git a/util/url_test.go b/util/url_test.go new file mode 100644 index 0000000..dec73dc --- /dev/null +++ b/util/url_test.go @@ -0,0 +1,36 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package util + +import ( + "net/url" + "testing" +) + +func TestBaseUrl(t *testing.T) { + for _, x := range [][2]string{ + {`http://example.com`, `http://example.com/`}, + {`scheme://example.com`, `scheme://example.com/`}, + {`https://example.com`, `https://example.com/`}, + {`https://example.com:8080/`, `https://example.com:8080/`}, + {`https://user@example.com:8080/`, `https://user@example.com:8080/`}, + {`https://user@example.com:8080/resource`, `https://user@example.com:8080/`}, + {`https://user@example.com:8080/resource/`, `https://user@example.com:8080/resource/`}, + {`https://user@example.com:8080/resource/#fragment`, `https://user@example.com:8080/resource/`}, + {`https://user@example.com:8080/resource/?query=test#fragment`, `https://user@example.com:8080/resource/`}, + } { + url, _ := url.Parse(x[0]) + if got, err := BaseURL(url); got != x[1] { + if err != nil { + t.Error(err) + } + t.Errorf("%q: Expected %q but got %q.", x[0], x[1], got) + } + } +} From e2ad3d3f8302a81be9fe4d20153aac2f0dc041bd Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 21 Jun 2024 14:02:51 +0200 Subject: [PATCH 080/235] docs: fix licensing info for generated files (#542) * docs: fix licensing info for generated files * change generate_cvss_enums.go to note that the input file is relevant for the license. * change license and copyright of cvss20enums.go and cvss3enums.go to BSD-3-Clause and FIRST. * add reuse.software 3.0 compatible files for the schema cvss files. * Stamp right license into generated files. --------- Co-authored-by: Sascha L. Teichmann --- LICENSES/BSD-3-Clause.txt | 11 +++++++++++ csaf/cvss20enums.go | 9 ++------- csaf/cvss3enums.go | 9 ++------- csaf/generate_cvss_enums.go | 28 +++++++++++++++++++++------- csaf/schema/cvss-v2.0.json.license | 2 ++ csaf/schema/cvss-v3.0.json.license | 2 ++ csaf/schema/cvss-v3.1.json.license | 2 ++ 7 files changed, 42 insertions(+), 21 deletions(-) create mode 100644 LICENSES/BSD-3-Clause.txt create mode 100644 csaf/schema/cvss-v2.0.json.license create mode 100644 csaf/schema/cvss-v3.0.json.license create mode 100644 csaf/schema/cvss-v3.1.json.license diff --git a/LICENSES/BSD-3-Clause.txt b/LICENSES/BSD-3-Clause.txt new file mode 100644 index 0000000..ea890af --- /dev/null +++ b/LICENSES/BSD-3-Clause.txt @@ -0,0 +1,11 @@ +Copyright (c) . + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/csaf/cvss20enums.go b/csaf/cvss20enums.go index 7056f3e..97d2e10 100644 --- a/csaf/cvss20enums.go +++ b/csaf/cvss20enums.go @@ -1,10 +1,5 @@ -// This file is Free Software under the Apache-2.0 License -// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. -// -// SPDX-License-Identifier: Apache-2.0 -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-License-Identifier: BSD-3-Clause +// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! diff --git a/csaf/cvss3enums.go b/csaf/cvss3enums.go index b8cf54f..32e01e3 100644 --- a/csaf/cvss3enums.go +++ b/csaf/cvss3enums.go @@ -1,10 +1,5 @@ -// This file is Free Software under the Apache-2.0 License -// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. -// -// SPDX-License-Identifier: Apache-2.0 -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-License-Identifier: BSD-3-Clause +// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index 7c9b9fd..c84ab15 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -14,21 +14,21 @@ import ( "bytes" "encoding/json" "flag" + "fmt" "go/format" "log" "os" + "regexp" "sort" "strings" "text/template" ) -const tmplText = `// This file is Free Software under the MIT License -// without warranty, see README.md and LICENSES/MIT.txt for details. -// -// SPDX-License-Identifier: MIT -// -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// We from Intevation consider the source code parts in the following +// template file as too insignificant to be a piece of work that gains +// "copyrights" protection in the European Union. So the license(s) +// of the output files are fully determined by the input file. +const tmplText = `// {{ $.License }} // // THIS FILE IS MACHINE GENERATED. EDIT WITH CARE! 
@@ -69,6 +69,7 @@ type definition struct { } type schema struct { + License []string `json:"license"` Definitions map[string]*definition `json:"definitions"` } @@ -137,9 +138,22 @@ func main() { } sort.Strings(defs) + license := "determine license(s) from input file and replace this line" + + pattern := regexp.MustCompile(`Copyright \(c\) (\d+), FIRST.ORG, INC.`) + for _, line := range s.License { + if m := pattern.FindStringSubmatch(line); m != nil { + license = fmt.Sprintf( + "SPDX-License-Identifier: BSD-3-Clause\n"+ + "// SPDX-FileCopyrightText: %s FIRST.ORG, INC.", m[1]) + break + } + } + var source bytes.Buffer check(tmpl.Execute(&source, map[string]any{ + "License": license, "Prefix": *prefix, "Definitions": s.Definitions, "Keys": defs, diff --git a/csaf/schema/cvss-v2.0.json.license b/csaf/schema/cvss-v2.0.json.license new file mode 100644 index 0000000..dd033e8 --- /dev/null +++ b/csaf/schema/cvss-v2.0.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. diff --git a/csaf/schema/cvss-v3.0.json.license b/csaf/schema/cvss-v3.0.json.license new file mode 100644 index 0000000..dd033e8 --- /dev/null +++ b/csaf/schema/cvss-v3.0.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2017 FIRST.ORG, INC. diff --git a/csaf/schema/cvss-v3.1.json.license b/csaf/schema/cvss-v3.1.json.license new file mode 100644 index 0000000..f87ced8 --- /dev/null +++ b/csaf/schema/cvss-v3.1.json.license @@ -0,0 +1,2 @@ +SPDX-License-Identifier: BSD-3-Clause +SPDX-FileCopyrightText: 2021 FIRST.ORG, INC. From 56fadc3a80f66d0006203e9983138c5171b07fbf Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 21 Jun 2024 14:04:20 +0200 Subject: [PATCH 081/235] docs: fix typo in examples/aggregator.toml (#539) --- docs/examples/aggregator.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index ae1723d..2161079 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -51,7 +51,7 @@ insecure = true # rate = 1.8 # insecure = true write_indices = true - # If aggregator.category == "aggreator", set for an entry that should + # If aggregator.category == "aggregator", set for an entry that should # be listed in addition: category = "lister" # ignore_pattern = [".*white.*", ".*red.*"] From 3084cdbc371f03adfe22c1640b53b43fed5a0563 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Fri, 21 Jun 2024 15:35:30 +0200 Subject: [PATCH 082/235] Address comments --- util/csv_test.go | 6 ++---- util/file_test.go | 54 ++++++++++++++++++++++------------------------- util/hash_test.go | 6 ++---- util/json_test.go | 49 ++++++++++++++++-------------------------- 4 files changed, 47 insertions(+), 68 deletions(-) diff --git a/util/csv_test.go b/util/csv_test.go index a744b75..575d83d 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ -17,15 +17,13 @@ func TestCSV(t *testing.T) { buf := new(bytes.Buffer) csvWriter := NewFullyQuotedCSWWriter(buf) for _, x := range [][]string{{"a", "b", "c"}, {"d", "e", "f"}} { - err := csvWriter.Write(x) - if err != nil { + if err := csvWriter.Write(x); err != nil { t.Error(err) } } csvWriter.Flush() - err := csvWriter.Error() - if err != nil { + if err := csvWriter.Error(); err != nil { t.Error(err) } for _, want := range []string{`"a","b","c"`, `"d","e","f"`} { diff --git a/util/file_test.go b/util/file_test.go index 320f3d4..28c5196 100644 --- 
a/util/file_test.go +++ b/util/file_test.go @@ -60,28 +60,25 @@ func TestConformingFileName(t *testing.T) { func TestIDMatchesFilename(t *testing.T) { pathEval := NewPathEval() - doc := make(map[string]interface{}) - doc["document"] = map[string]interface{}{ - "tracking": map[string]interface{}{ + doc := make(map[string]any) + doc["document"] = map[string]any{ + "tracking": map[string]any{ "id": "valid.json", }, } - err := IDMatchesFilename(pathEval, doc, "valid.json") - if err != nil { + if err := IDMatchesFilename(pathEval, doc, "valid.json"); err != nil { t.Errorf("IDMatchesFilename: Expected nil, got %q", err) } - err = IDMatchesFilename(pathEval, doc, "different_file_name.json") - if err == nil { + if err := IDMatchesFilename(pathEval, doc, "different_file_name.json"); err == nil { t.Error("IDMatchesFilename: Expected error, got nil") } - doc["document"] = map[string]interface{}{ - "tracking": map[string]interface{}{}, + doc["document"] = map[string]any{ + "tracking": map[string]any{}, } - err = IDMatchesFilename(pathEval, doc, "valid.json") - if err == nil { + if err := IDMatchesFilename(pathEval, doc, "valid.json"); err == nil { t.Error("IDMatchesFilename: Expected error, got nil") } } @@ -130,8 +127,7 @@ func TestNWriter(t *testing.T) { func TestWriteToFile(t *testing.T) { filename := filepath.Join(t.TempDir(), "test_file") wt := bytes.NewBufferString("test_data") - err := WriteToFile(filename, wt) - if err != nil { + if err := WriteToFile(filename, wt); err != nil { t.Error(err) } fileData, err := os.ReadFile(filename) @@ -149,12 +145,10 @@ func TestMakeUniqFile(t *testing.T) { if err != nil { t.Error(err) } - _, err = file.Write([]byte("test_data")) - if err != nil { + if _, err = file.Write([]byte("test_data")); err != nil { t.Error(err) } - err = file.Close() - if err != nil { + if err = file.Close(); err != nil { t.Error(err) } } @@ -185,16 +179,20 @@ func Test_mkUniq(t *testing.T) { func TestDeepCopy(t *testing.T) { dir := t.TempDir() - os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755) - os.MkdirAll(filepath.Join(dir, "dst"), 0755) - os.MkdirAll(filepath.Join(dir, "dst1"), 0755) - err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755) - if err != nil { - t.Error(err) + if err := os.MkdirAll(filepath.Join(dir, "src/folder0"), 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(filepath.Join(dir, "dst"), 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(filepath.Join(dir, "dst1"), 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(dir, "src/folder0/test_file"), []byte("test_data"), 0755); err != nil { + t.Fatal(err) } - err = DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")) - if err != nil { + if err := DeepCopy(filepath.Join(dir, "dst"), filepath.Join(dir, "src")); err != nil { t.Error(err) } @@ -207,13 +205,11 @@ func TestDeepCopy(t *testing.T) { t.Errorf("DeepCopy: Expected test_data, got %v", fileData) } - err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")) - if err == nil { + if err = DeepCopy("/path/does/not/exist", filepath.Join(dir, "src")); err == nil { t.Error("DeepCopy: Expected error, got nil") } - err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist") - if err == nil { + if err = DeepCopy(filepath.Join(dir, "dst1"), "/path/does/not/exist"); err == nil { t.Error("DeepCopy: Expected error, got nil") } } diff --git a/util/hash_test.go b/util/hash_test.go index ed0f0b2..d690891 100644 --- a/util/hash_test.go +++ b/util/hash_test.go @@ -64,8 
+64,7 @@ func TestWriteHashToFile(t *testing.T) { nameArg := "name" want := "deadbeef " + nameArg + "\n" - err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}) - if err != nil { + if err := WriteHashToFile(filePath, nameArg, hashArg, []byte{}); err != nil { t.Error(err) } testFile, err := os.Open(filePath) @@ -90,8 +89,7 @@ func TestWriteHashSumToFile(t *testing.T) { nameArg := "name" want := "deadbeef " + nameArg + "\n" - err := WriteHashSumToFile(filePath, nameArg, sum) - if err != nil { + if err := WriteHashSumToFile(filePath, nameArg, sum); err != nil { t.Error(err) } testFile, err := os.Open(filePath) diff --git a/util/json_test.go b/util/json_test.go index 452fabe..ba18171 100644 --- a/util/json_test.go +++ b/util/json_test.go @@ -31,7 +31,7 @@ func TestPathEval_Compile(t *testing.T) { t.Error("PathEval_Compile: Expected cached eval") } - got, err := eval.EvalInt(context.Background(), map[string]interface{}{"foo": 5}) + got, err := eval.EvalInt(context.Background(), map[string]any{"foo": 5}) if err != nil { t.Error(err) } @@ -46,7 +46,7 @@ func TestPathEval_Eval(t *testing.T) { if err == nil { t.Error("PathEval_Eval: Expected error, got nil") } - got, err := pathEval.Eval("foo", map[string]interface{}{"foo": 5}) + got, err := pathEval.Eval("foo", map[string]any{"foo": 5}) if err != nil { t.Error(err) } @@ -59,8 +59,7 @@ func TestReMarshalMatcher(t *testing.T) { var intDst int var uintSrc uint = 2 remarshalFunc := ReMarshalMatcher(&intDst) - err := remarshalFunc(uintSrc) - if err != nil { + if err := remarshalFunc(uintSrc); err != nil { t.Error(err) } if intDst != 2 { @@ -71,8 +70,7 @@ func TestReMarshalMatcher(t *testing.T) { func TestBoolMatcher(t *testing.T) { var boolDst bool boolFunc := BoolMatcher(&boolDst) - err := boolFunc(true) - if err != nil { + if err := boolFunc(true); err != nil { t.Error(err) } @@ -80,8 +78,7 @@ func TestBoolMatcher(t *testing.T) { t.Error("BoolMatcher: Expected true got false") } - err = boolFunc(1) - if err == nil { + if err := boolFunc(1); err == nil { t.Error("BoolMatcher: Expected error, got nil") } } @@ -89,8 +86,7 @@ func TestBoolMatcher(t *testing.T) { func TestStringMatcher(t *testing.T) { var stringDst string stringFunc := StringMatcher(&stringDst) - err := stringFunc("test") - if err != nil { + if err := stringFunc("test"); err != nil { t.Error(err) } @@ -98,8 +94,7 @@ func TestStringMatcher(t *testing.T) { t.Errorf("StringMatcher: Expected test, got %v", stringDst) } - err = stringFunc(1) - if err == nil { + if err := stringFunc(1); err == nil { t.Error("StringMatcher: Expected error, got nil") } } @@ -107,8 +102,7 @@ func TestStringMatcher(t *testing.T) { func TestStringTreeMatcher(t *testing.T) { var stringTreeDst []string stringTreeFunc := StringTreeMatcher(&stringTreeDst) - err := stringTreeFunc([]any{"a", "a", "b"}) - if err != nil { + if err := stringTreeFunc([]any{"a", "a", "b"}); err != nil { t.Error(err) } @@ -117,13 +111,11 @@ func TestStringTreeMatcher(t *testing.T) { t.Errorf("StringTreeMatcher: Expected %v, got %v", wantAnySlice, stringTreeDst) } - err = stringTreeFunc([]string{"a", "a", "b"}) - if err == nil { + if err := stringTreeFunc([]string{"a", "a", "b"}); err == nil { t.Error("StringTreeMatcher: Expected error, got nil") } - err = stringTreeFunc(1) - if err == nil { + if err := stringTreeFunc(1); err == nil { t.Error("StringTreeMatcher: Expected error, got nil") } } @@ -131,8 +123,7 @@ func TestStringTreeMatcher(t *testing.T) { func TestTimeMatcher(t *testing.T) { var timeDst time.Time timeFunc := 
TimeMatcher(&timeDst, time.RFC3339) - err := timeFunc("2024-03-18T12:57:48.236Z") - if err != nil { + if err := timeFunc("2024-03-18T12:57:48.236Z"); err != nil { t.Error(err) } wantTime := time.Date(2024, time.March, 18, 12, 57, 48, 236_000_000, time.UTC) @@ -140,13 +131,11 @@ func TestTimeMatcher(t *testing.T) { t.Errorf("TimeMatcher: Expected %v, got %v", wantTime, timeDst) } - err = timeFunc("") - if err == nil { + if err := timeFunc(""); err == nil { t.Error("TimeMatcher: Expected error, got nil") } - err = timeFunc(1) - if err == nil { + if err := timeFunc(1); err == nil { t.Error("TimeMatcher: Expected error, got nil") } } @@ -155,8 +144,7 @@ func TestPathEval_Extract(t *testing.T) { pathEval := NewPathEval() var result string matcher := StringMatcher(&result) - err := pathEval.Extract("foo", matcher, true, map[string]interface{}{"foo": "bar"}) - if err != nil { + if err := pathEval.Extract("foo", matcher, true, map[string]any{"foo": "bar"}); err != nil { t.Error(err) } if result != "bar" { @@ -166,13 +154,12 @@ func TestPathEval_Extract(t *testing.T) { func TestPathEval_Match(t *testing.T) { var got string - doc := map[string]interface{}{"foo": "bar"} + doc := map[string]any{"foo": "bar"} pe := NewPathEval() pem := PathEvalMatcher{Expr: "foo", Action: StringMatcher(&got)} - err := pe.Match([]PathEvalMatcher{pem}, doc) - if err != nil { + if err := pe.Match([]PathEvalMatcher{pem}, doc); err != nil { t.Error(err) } if got != "bar" { @@ -182,7 +169,7 @@ func TestPathEval_Match(t *testing.T) { func TestPathEval_Strings(t *testing.T) { pe := NewPathEval() - doc := map[string]interface{}{"foo": "bar"} + doc := map[string]any{"foo": "bar"} want := []string{"bar"} got, err := pe.Strings([]string{"foo"}, true, doc) @@ -196,7 +183,7 @@ func TestPathEval_Strings(t *testing.T) { } func TestAsStrings(t *testing.T) { - arg := []interface{}{"foo", "bar"} + arg := []any{"foo", "bar"} want := []string{"foo", "bar"} got, valid := AsStrings(arg) From 5c6736b178b113f6abc2cad6efd9301d5fbbe18e Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 24 Jun 2024 11:57:38 +0200 Subject: [PATCH 083/235] Remove data races in downloader caused by shared use of json path eval. (#547) --- cmd/csaf_downloader/downloader.go | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index c5c3e02..a0cf34e 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -40,7 +40,6 @@ import ( type downloader struct { cfg *config keys *crypto.KeyRing - eval *util.PathEval validator csaf.RemoteValidator forwarder *forwarder mkdirMu sync.Mutex @@ -73,7 +72,6 @@ func newDownloader(cfg *config) (*downloader, error) { return &downloader{ cfg: cfg, - eval: util.NewPathEval(), validator: validator, }, nil } @@ -218,17 +216,20 @@ func (d *downloader) download(ctx context.Context, domain string) error { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } + expr := util.NewPathEval() + if err := d.loadOpenPGPKeys( client, lpmd.Document, base, + expr, ); err != nil { return err } afp := csaf.NewAdvisoryFileProcessor( client, - d.eval, + expr, lpmd.Document, base) @@ -297,9 +298,10 @@ func (d *downloader) loadOpenPGPKeys( client util.Client, doc any, base *url.URL, + expr *util.PathEval, ) error { - src, err := d.eval.Eval("$.public_openpgp_keys", doc) + src, err := expr.Eval("$.public_openpgp_keys", doc) if err != nil { // no keys. 
return nil @@ -421,6 +423,7 @@ func (d *downloader) downloadWorker( dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339) lower = strings.ToLower(string(label)) stats = stats{} + expr = util.NewPathEval() ) // Add collected stats back to total. @@ -588,7 +591,7 @@ nextAdvisory: // Validate if filename is conforming. filenameCheck := func() error { - if err := util.IDMatchesFilename(d.eval, doc, filename); err != nil { + if err := util.IDMatchesFilename(expr, doc, filename); err != nil { stats.filenameFailed++ return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) } @@ -651,7 +654,7 @@ nextAdvisory: continue } - if err := d.eval.Extract( + if err := expr.Extract( `$.document.tracking.initial_release_date`, dateExtract, false, doc, ); err != nil { slog.Warn("Cannot extract initial_release_date from advisory", From a46c286cf482451e8f395d367ef8ad3c705cdfd4 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Fri, 19 Apr 2024 14:04:12 +0200 Subject: [PATCH 084/235] fix: don't drop error messages from loading provider-metadata.json previously in case case of trying last resort dns, all other error messages were dropped --- csaf/providermetaloader.go | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 203f2b3..0c4fc3b 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -173,6 +173,8 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata // We have a candidate. if wellknownResult.Valid() { wellknownGood = wellknownResult + } else { + pmdl.messages.AppendUnique(wellknownResult.Messages) } // Next load the PMDs from security.txt @@ -220,25 +222,28 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } } // Take the good well-known. - wellknownGood.Messages.AppendUnique(pmdl.messages) + wellknownGood.Messages = pmdl.messages return wellknownGood } // Don't have well-known. Take first good from security.txt. ignoreExtras() - secGoods[0].Messages.AppendUnique(pmdl.messages) + secGoods[0].Messages = pmdl.messages return secGoods[0] } // If we have a good well-known take it. if wellknownGood != nil { - wellknownGood.Messages.AppendUnique(pmdl.messages) + wellknownGood.Messages = pmdl.messages return wellknownGood } // Last resort: fall back to DNS. dnsURL := "https://csaf.data.security." + domain - return pmdl.loadFromURL(dnsURL) + dnsURLResult := pmdl.loadFromURL(dnsURL) + pmdl.messages.AppendUnique(dnsURLResult.Messages) // keep order of messages consistent (i.e. last occurred message is last element) + dnsURLResult.Messages = pmdl.messages + return dnsURLResult } // loadFromSecurity loads the PMDs mentioned in the security.txt. From 51dc9b5bcb26c74bc3e46f3c9cf0e7d190cc41d1 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Fri, 19 Apr 2024 14:06:56 +0200 Subject: [PATCH 085/235] refactor: deduplicate filtering pmd results from security.txt already done in `loadFromSecurity` --- csaf/providermetaloader.go | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 0c4fc3b..b21ddc6 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -178,20 +178,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata } // Next load the PMDs from security.txt - secResults := pmdl.loadFromSecurity(domain) - - // Filter out the results which are valid. 
- var secGoods []*LoadedProviderMetadata - - for _, result := range secResults { - if len(result.Messages) > 0 { - // If there where validation issues append them - // to the overall report - pmdl.messages.AppendUnique(pmdl.messages) - } else { - secGoods = append(secGoods, result) - } - } + secGoods := pmdl.loadFromSecurity(domain) // Mention extra CSAF entries in security.txt. ignoreExtras := func() { @@ -246,7 +233,7 @@ func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata return dnsURLResult } -// loadFromSecurity loads the PMDs mentioned in the security.txt. +// loadFromSecurity loads the PMDs mentioned in the security.txt. Only valid PMDs are returned. func (pmdl *ProviderMetadataLoader) loadFromSecurity(domain string) []*LoadedProviderMetadata { // If .well-known fails try legacy location. From 1e531de82d35ab549fa4b07f828f21a38554c3a5 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Mon, 15 Jul 2024 10:52:13 +0200 Subject: [PATCH 086/235] fix: don't require debug level to print error details on failed loading of provider metadata json --- cmd/csaf_aggregator/processor.go | 14 +++++++++----- cmd/csaf_downloader/downloader.go | 13 ++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index 9f10a77..5cb3628 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -89,17 +89,21 @@ func (w *worker) locateProviderMetadata(domain string) error { lpmd := loader.Load(domain) - if w.processor.cfg.Verbose { + if !lpmd.Valid() { for i := range lpmd.Messages { - w.log.Info( + w.log.Error( "Loading provider-metadata.json", "domain", domain, "message", lpmd.Messages[i].Message) } - } - - if !lpmd.Valid() { return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) + } else if w.processor.cfg.Verbose { + for i := range lpmd.Messages { + w.log.Debug( + "Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) + } } w.metadataProvider = lpmd.Document diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index a0cf34e..e370f55 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -199,7 +199,14 @@ func (d *downloader) download(ctx context.Context, domain string) error { lpmd := loader.Load(domain) - if d.cfg.verbose() { + if !lpmd.Valid() { + for i := range lpmd.Messages { + slog.Error("Loading provider-metadata.json", + "domain", domain, + "message", lpmd.Messages[i].Message) + } + return fmt.Errorf("no valid provider-metadata.json found for '%s': ", domain) + } else if d.cfg.verbose() { for i := range lpmd.Messages { slog.Debug("Loading provider-metadata.json", "domain", domain, @@ -207,10 +214,6 @@ func (d *downloader) download(ctx context.Context, domain string) error { } } - if !lpmd.Valid() { - return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) - } - base, err := url.Parse(lpmd.URL) if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) From bcf4d2f64aa267efe0e4cbf1a844d130fb708d23 Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Tue, 16 Jul 2024 12:00:09 +0200 Subject: [PATCH 087/235] fix error message The error message had a trailing `:` which suggest that there are some details which were truncated. However the details are already printed before in the log. 
--- cmd/csaf_downloader/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index e370f55..badf060 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -205,7 +205,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { "domain", domain, "message", lpmd.Messages[i].Message) } - return fmt.Errorf("no valid provider-metadata.json found for '%s': ", domain) + return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain) } else if d.cfg.verbose() { for i := range lpmd.Messages { slog.Debug("Loading provider-metadata.json", From a131b0fb4bc97592d8ac4d80280706359b2a6811 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:36:54 +0200 Subject: [PATCH 088/235] Improve SHA* marking --- cmd/csaf_checker/processor.go | 45 ++++++++++++++--- cmd/csaf_downloader/downloader.go | 34 ++++++++----- csaf/advisories.go | 83 +++++++++++++++---------------- 3 files changed, 99 insertions(+), 63 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..de42e18 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -20,6 +20,7 @@ import ( "fmt" "io" "log" + "log/slog" "net/http" "net/url" "path/filepath" @@ -138,7 +139,7 @@ func (m *topicMessages) info(format string, args ...any) { m.add(InfoType, format, args...) } -// use signals that we going to use this topic. +// use signals that we're going to use this topic. func (m *topicMessages) use() { if *m == nil { *m = []Message{} @@ -164,7 +165,7 @@ func (m *topicMessages) hasErrors() bool { return false } -// newProcessor returns an initilaized processor. +// newProcessor returns an initialized processor. func newProcessor(cfg *config) (*processor, error) { var validator csaf.RemoteValidator @@ -594,10 +595,15 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var file csaf.AdvisoryFile - if sha256 != "" || sha512 != "" || sign != "" { - file = csaf.HashedAdvisoryFile{url, sha256, sha512, sign} - } else { - file = csaf.PlainAdvisoryFile(url) + switch { + case sha256 == "" && sha512 == "": + slog.Error("No hash listed on ROLIE feed", "file", url) + return + case sign == "": + slog.Error("No signature listed on ROLIE feed", "file", url) + return + default: + file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} } files = append(files, file) @@ -888,7 +894,16 @@ func (p *processor) checkIndex(base string, mask whereType) error { p.badIntegrities.error("index.txt contains invalid URL %q in line %d", u, line) continue } - files = append(files, csaf.PlainAdvisoryFile(u)) + + SHA256 := p.checkURL(u + ".sha256") + SHA512 := p.checkURL(u + ".sha512") + sign := p.checkURL(u + ".asc") + files = append(files, csaf.PlainAdvisoryFile{ + Path: u, + SHA256: SHA256, + SHA512: SHA512, + Sign: sign, + }) } return files, scanner.Err() }() @@ -906,6 +921,15 @@ func (p *processor) checkIndex(base string, mask whereType) error { return p.integrity(files, base, mask, p.badIndices.add) } +// checkURL returns the URL if it is accessible. +func (p *processor) checkURL(url string) string { + _, err := p.client.Head(url) + if err != nil { + return url + } + return "" +} + // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. 
// It extracts the file content, tests the column number and the validity of the time format // of the fields' values and if they are sorted properly. Then it passes the files to the @@ -970,9 +994,14 @@ func (p *processor) checkChanges(base string, mask whereType) error { continue } path := r[pathColumn] + + SHA256 := p.checkURL(path + ".sha256") + SHA512 := p.checkURL(path + ".sha512") + sign := p.checkURL(path + ".asc") + times, files = append(times, t), - append(files, csaf.PlainAdvisoryFile(path)) + append(files, csaf.PlainAdvisoryFile{Path: path, SHA256: SHA256, SHA512: SHA512, Sign: sign}) } return times, files, nil }() diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..025ed65 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -501,23 +501,31 @@ nextAdvisory: signData []byte ) - // Only hash when we have a remote counter part we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) + if file.SHA256URL() == "" { + slog.Info("SHA256 not present", "file", file.URL()) } else { - s256 = sha256.New() - writers = append(writers, s256) + // Only hash when we have a remote counterpart we can compare it with. + if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + slog.Warn("Cannot fetch SHA256", + "url", file.SHA256URL(), + "error", err) + } else { + s256 = sha256.New() + writers = append(writers, s256) + } } - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) + if file.SHA512URL() == "" { + slog.Info("SHA512 not present", "file", file.URL()) } else { - s512 = sha512.New() - writers = append(writers, s512) + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + slog.Warn("Cannot fetch SHA512", + "url", file.SHA512URL(), + "error", err) + } else { + s512 = sha512.New() + writers = append(writers, s512) + } } // Remember the data as we need to store it to file later. diff --git a/csaf/advisories.go b/csaf/advisories.go index 6f07648..4aa7f52 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -34,55 +34,30 @@ type AdvisoryFile interface { // PlainAdvisoryFile is a simple implementation of checkFile. // The hash and signature files are directly constructed by extending // the file name. -type PlainAdvisoryFile string +type PlainAdvisoryFile struct { + Path string + SHA256 string + SHA512 string + Sign string +} // URL returns the URL of this advisory. -func (paf PlainAdvisoryFile) URL() string { return string(paf) } +func (paf PlainAdvisoryFile) URL() string { return paf.Path } // SHA256URL returns the URL of SHA256 hash file of this advisory. -func (paf PlainAdvisoryFile) SHA256URL() string { return string(paf) + ".sha256" } +func (paf PlainAdvisoryFile) SHA256URL() string { return paf.SHA256 } // SHA512URL returns the URL of SHA512 hash file of this advisory. -func (paf PlainAdvisoryFile) SHA512URL() string { return string(paf) + ".sha512" } +func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 } // SignURL returns the URL of signature file of this advisory. 
-func (paf PlainAdvisoryFile) SignURL() string { return string(paf) + ".asc" } +func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign } // LogValue implements [slog.LogValuer] func (paf PlainAdvisoryFile) LogValue() slog.Value { return slog.GroupValue(slog.String("url", paf.URL())) } -// HashedAdvisoryFile is a more involed version of checkFile. -// Here each component can be given explicitly. -// If a component is not given it is constructed by -// extending the first component. -type HashedAdvisoryFile [4]string - -func (haf HashedAdvisoryFile) name(i int, ext string) string { - if haf[i] != "" { - return haf[i] - } - return haf[0] + ext -} - -// URL returns the URL of this advisory. -func (haf HashedAdvisoryFile) URL() string { return haf[0] } - -// SHA256URL returns the URL of SHA256 hash file of this advisory. -func (haf HashedAdvisoryFile) SHA256URL() string { return haf.name(1, ".sha256") } - -// SHA512URL returns the URL of SHA512 hash file of this advisory. -func (haf HashedAdvisoryFile) SHA512URL() string { return haf.name(2, ".sha512") } - -// SignURL returns the URL of signature file of this advisory. -func (haf HashedAdvisoryFile) SignURL() string { return haf.name(3, ".asc") } - -// LogValue implements [slog.LogValuer] -func (haf HashedAdvisoryFile) LogValue() slog.Value { - return slog.GroupValue(slog.String("url", haf.URL())) -} - // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { @@ -120,7 +95,7 @@ func empty(arr []string) bool { return true } -// Process extracts the adivisory filenames and passes them with +// Process extracts the advisory filenames and passes them with // the corresponding label to fn. func (afp *AdvisoryFileProcessor) Process( fn func(TLPLabel, []AdvisoryFile) error, @@ -201,6 +176,15 @@ func (afp *AdvisoryFileProcessor) Process( return nil } +// checkURL returns the URL if it is accessible. +func (afp *AdvisoryFileProcessor) checkURL(url string) string { + _, err := afp.client.Head(url) + if err != nil { + return url + } + return "" +} + // loadChanges loads baseURL/changes.csv and returns a list of files // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( @@ -257,8 +241,19 @@ func (afp *AdvisoryFileProcessor) loadChanges( lg("%q contains an invalid URL %q in line %d", changesURL, path, line) continue } + + self := base.JoinPath(path).String() + sha256 := afp.checkURL(self + ".sha256") + sha512 := afp.checkURL(self + ".sha512") + sign := afp.checkURL(self + ".asc") + files = append(files, - PlainAdvisoryFile(base.JoinPath(path).String())) + PlainAdvisoryFile{ + Path: path, + SHA256: sha256, + SHA512: sha512, + Sign: sign, + }) } return files, nil } @@ -325,7 +320,6 @@ func (afp *AdvisoryFileProcessor) processROLIE( } rfeed.Entries(func(entry *Entry) { - // Filter if we have date checking. 
if afp.AgeAccept != nil { if t := time.Time(entry.Updated); !t.IsZero() && !afp.AgeAccept(t) { @@ -359,10 +353,15 @@ func (afp *AdvisoryFileProcessor) processROLIE( var file AdvisoryFile - if sha256 != "" || sha512 != "" || sign != "" { - file = HashedAdvisoryFile{self, sha256, sha512, sign} - } else { - file = PlainAdvisoryFile(self) + switch { + case sha256 == "" && sha512 == "": + slog.Error("No hash listed on ROLIE feed", "file", self) + return + case sign == "": + slog.Error("No signature listed on ROLIE feed", "file", self) + return + default: + file = PlainAdvisoryFile{self, sha256, sha512, sign} } files = append(files, file) From 0ab851a87428ddce7a55a335bd0d58e8dc541e73 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 25 Jul 2024 13:26:29 +0200 Subject: [PATCH 089/235] Use a default user agent --- cmd/csaf_aggregator/config.go | 5 +++++ cmd/csaf_checker/processor.go | 8 +++----- cmd/csaf_downloader/downloader.go | 8 +++----- cmd/csaf_downloader/forwarder.go | 8 +++----- util/client.go | 5 +++++ 5 files changed, 19 insertions(+), 15 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 9808542..b73286c 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -301,6 +301,11 @@ func (c *config) httpClient(p *provider) util.Client { Client: client, Header: c.ExtraHeader, } + default: + client = &util.HeaderClient{ + Client: client, + Header: http.Header{}, + } } if c.Verbose { diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..49e815c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -435,11 +435,9 @@ func (p *processor) fullClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(p.cfg.ExtraHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: p.cfg.ExtraHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: p.cfg.ExtraHeader, } // Add optional URL logging. diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..9cef294 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -126,11 +126,9 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(d.cfg.ExtraHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: d.cfg.ExtraHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: d.cfg.ExtraHeader, } // Add optional URL logging. diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 3b1435a..c3681eb 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -111,11 +111,9 @@ func (f *forwarder) httpClient() util.Client { client := util.Client(&hClient) // Add extra headers. - if len(f.cfg.ForwardHeader) > 0 { - client = &util.HeaderClient{ - Client: client, - Header: f.cfg.ForwardHeader, - } + client = &util.HeaderClient{ + Client: client, + Header: f.cfg.ForwardHeader, } // Add optional URL logging. 
diff --git a/util/client.go b/util/client.go index 5a11c7b..441aaaa 100644 --- a/util/client.go +++ b/util/client.go @@ -61,6 +61,11 @@ func (hc *HeaderClient) Do(req *http.Request) (*http.Response, error) { req.Header.Add(key, v) } } + + // Use default user agent if none is set + if userAgent := hc.Header.Get("User-Agent"); userAgent == "" { + req.Header.Add("User-Agent", "csaf-distribution/"+SemVersion) + } return hc.Client.Do(req) } From 3a67fb52100dac0ca64719899afb431fbb8bd590 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:00:40 +0200 Subject: [PATCH 090/235] Add user-agent documentation --- docs/csaf_checker.md | 13 +++++++++---- docs/csaf_downloader.md | 13 +++++++++++-- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 58f77ca..a5bc0bf 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -30,9 +30,12 @@ Help Options: Will check all given _domains_, by trying each as a CSAF provider. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. + If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there. If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/checker.toml ~/.csaf_checker.toml @@ -41,6 +44,7 @@ csaf_checker.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Supported options in config files: + ``` output = "" format = "json" @@ -58,9 +62,10 @@ validator_preset = ["mandatory"] ``` Usage example: -` ./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html` +`./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html` Each performed check has a return type of either 0,1 or 2: + ``` type 0: success type 1: warning @@ -70,16 +75,16 @@ type 2: error The checker result is a success if no checks resulted in type 2, and a failure otherwise. The option `timerange` allows to only check advisories from a given time -interval. It can only be given once. See the +interval. It can only be given once. See the [downloader documentation](csaf_downloader.md#timerange-option) for details. - You can ignore certain advisories while checking by specifying a list of regular expressions[^1] to match their URLs by using the `ignorepattern` option. E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain the sub strings **white** or **red**. In the config file this has to be noted as: + ``` ignorepattern = [".*white.*", ".*red.*"] ``` @@ -88,7 +93,7 @@ ignorepattern = [".*white.*", ".*red.*"] The `role` given in the `provider-metadata.json` is not yet considered to change the overall result, -see https://github.com/csaf-poc/csaf_distribution/issues/221 . +see . If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected. To check these advisories, authorization can be given via custom headers or certificates. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index fcf6634..2831cb4 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -1,4 +1,5 @@ ## csaf_downloader + A tool to download CSAF documents from CSAF providers. 
### Usage @@ -39,6 +40,8 @@ Help Options: Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. + If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading procedes from there. Increasing the number of workers opens more connections to the web servers @@ -47,6 +50,7 @@ However, since this also increases the load on the servers, their administrators have taken countermeasures to limit this. If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/downloader.toml ~/.csaf_downloader.toml @@ -56,6 +60,7 @@ csaf_downloader.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Supported options in config files: + ``` # directory # not set by default insecure = false @@ -90,6 +95,7 @@ option. E.g. `-i='.*white.*' -i='*.red.*'` will ignore files which URLs contain the sub strings **white** or **red**. In the config file this has to be noted as: + ``` ignorepattern = [".*white.*", ".*red.*"] ``` @@ -106,16 +112,18 @@ into a given intervall. There are three possible notations: and 'y' for years are recognized. In these cases only integer values are accepted without any fractions. Some examples: + - `"3h"` means downloading the advisories that have changed in the last three hours. - - `"30m"` .. changed within the last thirty minutes. + - `"30m"` .. changed within the last thirty minutes. - `"3M2m"` .. changed within the last three months and two minutes. - - `"2y"` .. changed within the last two years. + - `"2y"` .. changed within the last two years. 2. Absolute. If the given string is an RFC 3339 date timestamp the time interval between this date and now is used. E.g. `"2006-01-02"` means that all files between 2006 January 2nd and now going to being downloaded. Accepted patterns are: + - `"2006-01-02T15:04:05Z"` - `"2006-01-02T15:04:05+07:00"` - `"2006-01-02T15:04:05-07:00"` @@ -134,6 +142,7 @@ into a given intervall. There are three possible notations: All interval boundaries are inclusive. #### Forwarding + The downloader is able to forward downloaded advisories and their checksums, OpenPGP signatures and validation results to an HTTP endpoint. The details of the implemented API are described [here](https://github.com/mfd2007/csaf_upload_interface). 
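Both documents above describe passing extra headers as `key:value` pairs, for example `--header=user-agent:custom-agent/1.0` or `-H apikey:SECRET`. A small sketch of how such options can be folded into an `http.Header`; the `headersFromOptions` helper is hypothetical and not the tools' actual flag parsing:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

// headersFromOptions turns "key:value" strings into an http.Header.
func headersFromOptions(opts []string) http.Header {
	h := http.Header{}
	for _, o := range opts {
		key, value, ok := strings.Cut(o, ":")
		if !ok {
			continue // silently skip malformed entries in this sketch
		}
		h.Add(strings.TrimSpace(key), strings.TrimSpace(value))
	}
	return h
}

func main() {
	h := headersFromOptions([]string{"user-agent:custom-agent/1.0", "apikey:SECRET"})
	fmt.Println(h.Get("User-Agent")) // custom-agent/1.0
	fmt.Println(h.Get("Apikey"))     // SECRET
}
```

`http.Header.Add` canonicalises the key, so `user-agent` is stored as `User-Agent`; a later `Get("User-Agent")` check, like the default-agent fallback shown in `util/client.go` above, then sees the custom value and leaves it untouched.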
From be2e4e74242774d9e8bfb97f13886d9c4fa6e241 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:42:45 +0200 Subject: [PATCH 091/235] Improve hash path handling of directory feeds --- cmd/csaf_checker/processor.go | 25 ++----------- cmd/csaf_downloader/downloader.go | 29 +++++++++------- csaf/advisories.go | 58 ++++++++++++++++++------------- 3 files changed, 52 insertions(+), 60 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index de42e18..38f3e34 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -895,15 +895,7 @@ func (p *processor) checkIndex(base string, mask whereType) error { continue } - SHA256 := p.checkURL(u + ".sha256") - SHA512 := p.checkURL(u + ".sha512") - sign := p.checkURL(u + ".asc") - files = append(files, csaf.PlainAdvisoryFile{ - Path: u, - SHA256: SHA256, - SHA512: SHA512, - Sign: sign, - }) + files = append(files, csaf.DirectoryAdvisoryFile{Path: u}) } return files, scanner.Err() }() @@ -921,15 +913,6 @@ func (p *processor) checkIndex(base string, mask whereType) error { return p.integrity(files, base, mask, p.badIndices.add) } -// checkURL returns the URL if it is accessible. -func (p *processor) checkURL(url string) string { - _, err := p.client.Head(url) - if err != nil { - return url - } - return "" -} - // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. // It extracts the file content, tests the column number and the validity of the time format // of the fields' values and if they are sorted properly. Then it passes the files to the @@ -995,13 +978,9 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - SHA256 := p.checkURL(path + ".sha256") - SHA512 := p.checkURL(path + ".sha512") - sign := p.checkURL(path + ".asc") - times, files = append(times, t), - append(files, csaf.PlainAdvisoryFile{Path: path, SHA256: SHA256, SHA512: SHA512, Sign: sign}) + append(files, csaf.DirectoryAdvisoryFile{Path: path}) } return times, files, nil }() diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 025ed65..3bf3647 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -501,31 +501,31 @@ nextAdvisory: signData []byte ) - if file.SHA256URL() == "" { - slog.Info("SHA256 not present", "file", file.URL()) - } else { - // Only hash when we have a remote counterpart we can compare it with. - if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + // Only hash when we have a remote counterpart we can compare it with. 
+ if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + if !file.IsDirectory() { slog.Warn("Cannot fetch SHA256", "url", file.SHA256URL(), "error", err) } else { - s256 = sha256.New() - writers = append(writers, s256) + slog.Info("SHA256 not present", "file", file.URL()) } + } else { + s256 = sha256.New() + writers = append(writers, s256) } - if file.SHA512URL() == "" { - slog.Info("SHA512 not present", "file", file.URL()) - } else { - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if !file.IsDirectory() { slog.Warn("Cannot fetch SHA512", "url", file.SHA512URL(), "error", err) } else { - s512 = sha512.New() - writers = append(writers, s512) + slog.Info("SHA512 not present", "file", file.URL()) } + } else { + s512 = sha512.New() + writers = append(writers, s512) } // Remember the data as we need to store it to file later. @@ -757,6 +757,9 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, } func loadHash(client util.Client, p string) ([]byte, []byte, error) { + if p == "" { + return nil, nil, fmt.Errorf("no hash path provided") + } resp, err := client.Get(p) if err != nil { return nil, nil, err diff --git a/csaf/advisories.go b/csaf/advisories.go index 4aa7f52..d05331c 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -29,11 +29,10 @@ type AdvisoryFile interface { SHA256URL() string SHA512URL() string SignURL() string + IsDirectory() bool } -// PlainAdvisoryFile is a simple implementation of checkFile. -// The hash and signature files are directly constructed by extending -// the file name. +// PlainAdvisoryFile contains all relevant urls of a remote file. type PlainAdvisoryFile struct { Path string SHA256 string @@ -53,11 +52,41 @@ func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 } // SignURL returns the URL of signature file of this advisory. func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign } +// IsDirectory returns true, if was fetched via directory feeds. +func (paf PlainAdvisoryFile) IsDirectory() bool { return false } + // LogValue implements [slog.LogValuer] func (paf PlainAdvisoryFile) LogValue() slog.Value { return slog.GroupValue(slog.String("url", paf.URL())) } +// DirectoryAdvisoryFile only contains the base file path. +// The hash and signature files are directly constructed by extending +// the file name. +type DirectoryAdvisoryFile struct { + Path string +} + +// URL returns the URL of this advisory. +func (daf DirectoryAdvisoryFile) URL() string { return daf.Path } + +// SHA256URL returns the URL of SHA256 hash file of this advisory. +func (daf DirectoryAdvisoryFile) SHA256URL() string { return daf.Path + ".sha256" } + +// SHA512URL returns the URL of SHA512 hash file of this advisory. +func (daf DirectoryAdvisoryFile) SHA512URL() string { return daf.Path + ".sha512" } + +// SignURL returns the URL of signature file of this advisory. +func (daf DirectoryAdvisoryFile) SignURL() string { return daf.Path + ".asc" } + +// IsDirectory returns true, if was fetched via directory feeds. +func (daf DirectoryAdvisoryFile) IsDirectory() bool { return true } + +// LogValue implements [slog.LogValuer] +func (daf DirectoryAdvisoryFile) LogValue() slog.Value { + return slog.GroupValue(slog.String("url", daf.URL())) +} + // AdvisoryFileProcessor implements the extraction of // advisory file names from a given provider metadata. 
type AdvisoryFileProcessor struct { @@ -69,7 +98,7 @@ type AdvisoryFileProcessor struct { base *url.URL } -// NewAdvisoryFileProcessor constructs an filename extractor +// NewAdvisoryFileProcessor constructs a filename extractor // for a given metadata document. func NewAdvisoryFileProcessor( client util.Client, @@ -176,15 +205,6 @@ func (afp *AdvisoryFileProcessor) Process( return nil } -// checkURL returns the URL if it is accessible. -func (afp *AdvisoryFileProcessor) checkURL(url string) string { - _, err := afp.client.Head(url) - if err != nil { - return url - } - return "" -} - // loadChanges loads baseURL/changes.csv and returns a list of files // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( @@ -242,18 +262,8 @@ func (afp *AdvisoryFileProcessor) loadChanges( continue } - self := base.JoinPath(path).String() - sha256 := afp.checkURL(self + ".sha256") - sha512 := afp.checkURL(self + ".sha512") - sign := afp.checkURL(self + ".asc") - files = append(files, - PlainAdvisoryFile{ - Path: path, - SHA256: sha256, - SHA512: sha512, - Sign: sign, - }) + DirectoryAdvisoryFile{Path: base.JoinPath(path).String()}) } return files, nil } From 1a2ce684ff94a0f47a4b9737698b1961b4aae91b Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 1 Aug 2024 14:53:23 +0200 Subject: [PATCH 092/235] improve default header * use `csaf_distribution` with an underscore as underscores are allowed by RFC9110 and it is more consistent as it is used with underscore at other places. * change example to `VERSION` to indicate that this is dynamic. --- docs/csaf_checker.md | 2 +- docs/csaf_downloader.md | 2 +- util/client.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index a5bc0bf..0b223b6 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -30,7 +30,7 @@ Help Options: Will check all given _domains_, by trying each as a CSAF provider. -If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent. If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 2831cb4..003ae4a 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -40,7 +40,7 @@ Help Options: Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider. -If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf-distribution/version` is sent. +If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent. If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading procedes from there. 
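Going back to patch 091 above: ROLIE feeds now yield a `PlainAdvisoryFile` with explicitly listed companion URLs, while directory based feeds yield a `DirectoryAdvisoryFile` that derives them by extending the path. A short usage sketch; the URLs are made up and the import path is the one in effect at this point of the series:

```go
package main

import (
	"fmt"

	"github.com/csaf-poc/csaf_distribution/v3/csaf"
)

func main() {
	// Directory based feeds: companion URLs are derived from the path.
	dir := csaf.DirectoryAdvisoryFile{
		Path: "https://example.com/.well-known/csaf/white/2024/example.json",
	}
	fmt.Println(dir.SHA256URL()) // ...example.json.sha256
	fmt.Println(dir.SignURL())   // ...example.json.asc
	fmt.Println(dir.IsDirectory())

	// ROLIE feeds: companion URLs are taken from the feed entry itself.
	plain := csaf.PlainAdvisoryFile{
		Path:   "https://example.com/.well-known/csaf/white/2024/example.json",
		SHA256: "https://example.com/.well-known/csaf/white/2024/example.json.sha256",
		SHA512: "https://example.com/.well-known/csaf/white/2024/example.json.sha512",
		Sign:   "https://example.com/.well-known/csaf/white/2024/example.json.asc",
	}
	fmt.Println(plain.SHA512URL())
	fmt.Println(plain.IsDirectory()) // false
}
```

The `IsDirectory` flag is what lets the downloader above log a missing hash as informational for directory feeds instead of warning about a failed fetch.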
diff --git a/util/client.go b/util/client.go index 441aaaa..b4478ca 100644 --- a/util/client.go +++ b/util/client.go @@ -64,7 +64,7 @@ func (hc *HeaderClient) Do(req *http.Request) (*http.Response, error) { // Use default user agent if none is set if userAgent := hc.Header.Get("User-Agent"); userAgent == "" { - req.Header.Add("User-Agent", "csaf-distribution/"+SemVersion) + req.Header.Add("User-Agent", "csaf_distribution/"+SemVersion) } return hc.Client.Do(req) } From 13a635c7e34c56e4ff39cbfc1ef1e2b6e7bd230a Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 1 Aug 2024 15:43:35 +0200 Subject: [PATCH 093/235] Add user-agent documentation to aggregator --- docs/csaf_aggregator.md | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 36cbe7e..661871c 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -16,6 +16,7 @@ Help Options: ``` If no config file is explictly given the follwing places are searched for a config file: + ``` ~/.config/csaf/aggregator.toml ~/.csaf_aggregator.toml @@ -25,6 +26,7 @@ csaf_aggregator.toml with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems. Usage example for a single run, to test if the config is good: + ```bash ./csaf_aggregator -c docs/examples/aggregator.toml ``` @@ -62,7 +64,6 @@ SHELL=/bin/bash 30 0-23 * * * $HOME/bin/csaf_aggregator --config /etc/csaf_aggregator.toml --interim >> /var/log/csaf_aggregator/interim.log 2>&1 ``` - #### serve via web server Serve the paths where the aggregator writes its `html/` output @@ -78,7 +79,6 @@ a template. For the aggregator the difference is that you can leave out the cgi-bin part, potentially commend out the TLS client parts and adjust the `root` path accordingly. - ### config options The config file is written in [TOML](https://toml.io/en/v1.0.0). @@ -118,10 +118,12 @@ Next we have two TOML _tables_: aggregator // basic infos for the aggregator object remote_validator // config for optional remote validation checker ``` + [See the provider config](csaf_provider.md#provider-options) about how to configure `remote_validator`. At last there is the TOML _array of tables_: + ``` providers // each entry to be mirrored or listed ``` @@ -148,6 +150,9 @@ header Where valid `name` and `domain` settings are required. +If no user agent is specified with `header = "user-agent:custom-agent/1.0"` +then the default agent in the form of `csaf_distribution/VERSION` is sent. + If you want an entry to be listed instead of mirrored in a `aggregator.category == "aggregator"` instance, set `category` to `lister` in the entry. @@ -165,15 +170,16 @@ To offer an easy way of assorting CSAF documents by criteria like document category, languages or values of the branch category within the product tree, ROLIE category values can be configured in `categories`. This can either -be done using an array of strings taken literally or, by prepending `"expr:"`. -The latter is evaluated as JSONPath and the result will be added into the +be done using an array of strings taken literally or, by prepending `"expr:"`. +The latter is evaluated as JSONPath and the result will be added into the categories document. For a more detailed explanation and examples, [refer to the provider config](csaf_provider.md#provider-options). 
- #### Example config file + + ```toml workers = 2 folder = "/var/csaf_aggregator" @@ -233,8 +239,8 @@ insecure = true category = "lister" # ignore_pattern = [".*white.*", ".*red.*"] ``` - + #### Publish others' advisories From 8feddc70e1c945e2cf2ec8cab92525aa8e89106d Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 23 Jul 2024 13:41:03 +0200 Subject: [PATCH 094/235] feat: no longer require to be root user to call setup scripts --- docs/scripts/Readme.md | 4 ++-- docs/scripts/setupValidationService.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index a3b932d..95f39b2 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -12,10 +12,10 @@ and configures nginx for serving TLS connections. As creating the folders needs to authenticate with the csaf_provider, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` -Calling example (as root): +Calling example (as user with sudo privileges): ``` bash curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh - bash prepareUbuntuInstanceForITests.sh + sudo bash prepareUbuntuInstanceForITests.sh git clone https://github.com/csaf-poc/csaf_distribution.git # --branch pushd csaf_distribution/docs/scripts/ diff --git a/docs/scripts/setupValidationService.sh b/docs/scripts/setupValidationService.sh index d6f8ba7..4a7dfd7 100755 --- a/docs/scripts/setupValidationService.sh +++ b/docs/scripts/setupValidationService.sh @@ -21,7 +21,7 @@ echo ' remote_validator= { "url" = "http://localhost:8082", "presets" = ["mandatory"], "cache" = "/var/lib/csaf/validations.db" } ' | sudo tee --append /etc/csaf/config.toml -npm install pm2 -g +sudo npm install pm2 -g pushd ~ git clone https://github.com/secvisogram/csaf-validator-service.git From 9037574d967da7ad80972edde4b74810c735e11c Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 8 Aug 2024 12:17:58 +0200 Subject: [PATCH 095/235] Improve PGP fingerprint handling Warn if no fingerprint is specified and give more details, if fingerprint comparison fails. Closes #555 --- cmd/csaf_checker/processor.go | 9 +++++++-- cmd/csaf_downloader/downloader.go | 7 ++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 451a315..b5f949e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1449,7 +1449,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { } // checkPGPKeys checks if the OpenPGP keys are available and valid, fetches -// the the remotely keys and compares the fingerprints. +// the remotely keys and compares the fingerprints. // As a result of these a respective error messages are passed to badPGP method // in case of errors. It returns nil if all checks are passed. 
func (p *processor) checkPGPKeys(_ string) error { @@ -1518,8 +1518,13 @@ func (p *processor) checkPGPKeys(_ string) error { continue } + if key.Fingerprint == "" { + p.badPGPs.warn("No fingerprint for public OpenPGP key found.") + continue + } + if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { - p.badPGPs.error("Fingerprint of public OpenPGP key %s does not match remotely loaded.", u) + p.badPGPs.error("Given Fingerprint (%q) of public OpenPGP key %q does not match remotely loaded (%q).", string(key.Fingerprint), u, ckey.GetFingerprint()) continue } if p.keys == nil { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index badf060..a5eeb71 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -366,10 +366,15 @@ func (d *downloader) loadOpenPGPKeys( continue } + if key.Fingerprint == "" { + slog.Warn("No fingerprint for public OpenPGP key found.") + continue + } + if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { slog.Warn( "Fingerprint of public OpenPGP key does not match remotely loaded", - "url", u) + "url", u, "fingerprint", key.Fingerprint, "remote-fingerprint", ckey.GetFingerprint()) continue } if d.keys == nil { From c2e24f7bbb1b49f5bcdd6163aad4b03e05398f31 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Sep 2024 18:18:37 +0200 Subject: [PATCH 096/235] Remove check for empty fingerprint The schema validation already catches this error and this check will never run. --- cmd/csaf_checker/processor.go | 5 ----- cmd/csaf_downloader/downloader.go | 5 ----- 2 files changed, 10 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index b5f949e..d05a9ec 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1518,11 +1518,6 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - if key.Fingerprint == "" { - p.badPGPs.warn("No fingerprint for public OpenPGP key found.") - continue - } - if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { p.badPGPs.error("Given Fingerprint (%q) of public OpenPGP key %q does not match remotely loaded (%q).", string(key.Fingerprint), u, ckey.GetFingerprint()) continue diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index a5eeb71..7e07449 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -366,11 +366,6 @@ func (d *downloader) loadOpenPGPKeys( continue } - if key.Fingerprint == "" { - slog.Warn("No fingerprint for public OpenPGP key found.") - continue - } - if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) { slog.Warn( "Fingerprint of public OpenPGP key does not match remotely loaded", From 5231b3386b8126b248cc8cc9be451063caa17aab Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Sat, 7 Sep 2024 09:58:14 +0200 Subject: [PATCH 097/235] docs: improve code comment (minor) --- cmd/csaf_checker/processor.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index d05a9ec..c0034ca 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1449,9 +1449,9 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { } // checkPGPKeys checks if the OpenPGP keys are available and valid, fetches -// the remotely keys and compares the fingerprints. -// As a result of these a respective error messages are passed to badPGP method -// in case of errors. 
It returns nil if all checks are passed. +// the remote pubkeys and compares the fingerprints. +// As a result of these checks respective error messages are passed +// to badPGP methods. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { p.badPGPs.use() From 37c9eaf3467acd8e7ad08dfb3a076cf9849c67cc Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Mon, 9 Sep 2024 10:25:13 +0200 Subject: [PATCH 098/235] Add CLI flags to specify what hash is preferred --- cmd/csaf_downloader/config.go | 12 +++++++- cmd/csaf_downloader/downloader.go | 50 ++++++++++++++----------------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index dcfc090..71c5055 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -41,6 +41,13 @@ const ( validationUnsafe = validationMode("unsafe") ) +type hashAlgorithm string + +const ( + algSha256 = hashAlgorithm("SHA256") + algSha2512 = hashAlgorithm("SHA512") +) + type config struct { Directory string `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"` Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"` @@ -79,6 +86,9 @@ type config struct { clientCerts []tls.Certificate ignorePattern filter.PatternMatcher + + //lint:ignore SA5008 We are using choice or than once: sha256, sha512 + PreferredHash hashAlgorithm `long:"preferred_hash" short:"h" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` } // configPaths are the potential file locations of the config file. @@ -220,7 +230,7 @@ func (cfg *config) prepareLogging() error { w = f } ho := slog.HandlerOptions{ - //AddSource: true, + // AddSource: true, Level: cfg.LogLevel.Level, ReplaceAttr: dropSubSeconds, } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3bf3647..3cb7332 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -53,7 +53,6 @@ type downloader struct { const failedValidationDir = "failed_validation" func newDownloader(cfg *config) (*downloader, error) { - var validator csaf.RemoteValidator if cfg.RemoteValidator != "" { @@ -103,7 +102,6 @@ func logRedirect(req *http.Request, via []*http.Request) error { } func (d *downloader) httpClient() util.Client { - hClient := http.Client{} if d.cfg.verbose() { @@ -253,7 +251,6 @@ func (d *downloader) downloadFiles( label csaf.TLPLabel, files []csaf.AdvisoryFile, ) error { - var ( advisoryCh = make(chan csaf.AdvisoryFile) errorCh = make(chan error) @@ -303,7 +300,6 @@ func (d *downloader) loadOpenPGPKeys( base *url.URL, expr *util.PathEval, ) error { - src, err := expr.Eval("$.public_openpgp_keys", doc) if err != nil { // no keys. @@ -357,7 +353,6 @@ func (d *downloader) loadOpenPGPKeys( defer res.Body.Close() return crypto.NewKeyFromArmoredReader(res.Body) }() - if err != nil { slog.Warn( "Reading public OpenPGP key failed", @@ -501,31 +496,35 @@ nextAdvisory: signData []byte ) - // Only hash when we have a remote counterpart we can compare it with. 
- if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) + if (d.cfg.PreferredHash != "sha512" || file.SHA512URL() == "") && file.SHA256URL() != "" { + // Only hash when we have a remote counterpart we can compare it with. + if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { + if !file.IsDirectory() { + slog.Warn("Cannot fetch SHA256", + "url", file.SHA256URL(), + "error", err) + } else { + slog.Info("SHA256 not present", "file", file.URL()) + } } else { - slog.Info("SHA256 not present", "file", file.URL()) + s256 = sha256.New() + writers = append(writers, s256) } - } else { - s256 = sha256.New() - writers = append(writers, s256) } - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) + if (d.cfg.PreferredHash != "sha256" || file.SHA256URL() == "") && file.SHA512URL() != "" { + if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { + if !file.IsDirectory() { + slog.Warn("Cannot fetch SHA512", + "url", file.SHA512URL(), + "error", err) + } else { + slog.Info("SHA512 not present", "file", file.URL()) + } } else { - slog.Info("SHA512 not present", "file", file.URL()) + s512 = sha512.New() + writers = append(writers, s512) } - } else { - s512 = sha512.New() - writers = append(writers, s512) } // Remember the data as we need to store it to file later. @@ -757,9 +756,6 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, } func loadHash(client util.Client, p string) ([]byte, []byte, error) { - if p == "" { - return nil, nil, fmt.Errorf("no hash path provided") - } resp, err := client.Get(p) if err != nil { return nil, nil, err From c148a18dba7684b17af5306569d2b4a737332e3b Mon Sep 17 00:00:00 2001 From: 4echow <33332102+4echow@users.noreply.github.com> Date: Thu, 12 Sep 2024 09:27:12 +0200 Subject: [PATCH 099/235] docs:: fix miner typo in csaf_downloader.md --- docs/csaf_downloader.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 003ae4a..07c6e63 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -158,7 +158,7 @@ key protection mechanism based on RFC 1423, see Thus it considered experimental and most likely to be removed in a future release. Please only use this option, if you fully understand the security implications! -Note that for fully automated processes, it usually not make sense +Note that for fully automated processes, it usually does not make sense to protect the client certificate's private key with a passphrase. Because the passphrase has to be accessible to the process anyway to run unattented. 
In this situation the processing environment should be secured From f36c96e79864e9aea64ce6b1017521b2492b6492 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Mon, 9 Sep 2024 16:04:47 +0200 Subject: [PATCH 100/235] Upgrade to go v1.22 Closes #570 --- .github/workflows/itest.yml | 2 +- README.md | 2 +- docs/Development.md | 2 +- go.mod | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index eff11c2..364c330 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -9,7 +9,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v3 with: - go-version: 1.21.0 + go-version: 1.22.0 - name: Set up Node.js uses: actions/setup-node@v3 diff --git a/README.md b/README.md index bc9ae2a..14ac64f 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.21+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` diff --git a/docs/Development.md b/docs/Development.md index e7ce388..5c4df22 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.22 and 1.21). +the latest version of Go (currently 1.22 and 1.23). ## Generated files diff --git a/go.mod b/go.mod index 5b33724..52f1f02 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/csaf-poc/csaf_distribution/v3 -go 1.21 +go 1.22 require ( github.com/BurntSushi/toml v1.3.2 From c0de0c2b6de4d0b739badfcbe6d259739af9cffa Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 27 Sep 2024 15:20:36 +0200 Subject: [PATCH 101/235] Check if hash present, before sending a request --- cmd/csaf_checker/processor.go | 44 +++++++++++------------------------ 1 file changed, 14 insertions(+), 30 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 38f3e34..ede8fd6 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -84,10 +84,8 @@ type reporter interface { report(*processor, *Domain) } -var ( - // errContinue indicates that the current check should continue. - errContinue = errors.New("continue") -) +// errContinue indicates that the current check should continue. +var errContinue = errors.New("continue") type whereType byte @@ -167,7 +165,6 @@ func (m *topicMessages) hasErrors() bool { // newProcessor returns an initialized processor. func newProcessor(cfg *config) (*processor, error) { - var validator csaf.RemoteValidator if cfg.RemoteValidator != "" { @@ -240,7 +237,6 @@ func (p *processor) reset() { // Then it calls the report method on each report from the given "reporters" parameter for each domain. // It returns a pointer to the report and nil, otherwise an error. func (p *processor) run(domains []string) (*Report, error) { - report := Report{ Date: ReportTime{Time: time.Now().UTC()}, Version: util.SemVersion, @@ -297,7 +293,6 @@ func (p *processor) run(domains []string) (*Report, error) { // fillMeta fills the report with extra informations from provider metadata. 
func (p *processor) fillMeta(domain *Domain) error { - if p.pmd == nil { return nil } @@ -323,7 +318,6 @@ func (p *processor) fillMeta(domain *Domain) error { // domainChecks compiles a list of checks which should be performed // for a given domain. func (p *processor) domainChecks(domain string) []func(*processor, string) error { - // If we have a direct domain url we dont need to // perform certain checks. direct := strings.HasPrefix(domain, "https://") @@ -393,7 +387,6 @@ func (p *processor) markChecked(s string, mask whereType) bool { } func (p *processor) checkRedirect(r *http.Request, via []*http.Request) error { - url := r.URL.String() p.checkTLS(url) if p.redirects == nil { @@ -495,7 +488,6 @@ func (p *processor) usedAuthorizedClient() bool { // rolieFeedEntries loads the references to the advisory files for a given feed. func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { - client := p.httpClient() res, err := client.Get(feed) p.badDirListings.use() @@ -546,7 +538,6 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var files []csaf.AdvisoryFile rfeed.Entries(func(entry *csaf.Entry) { - // Filter if we have date checking. if accept := p.cfg.Range; accept != nil { if t := time.Time(entry.Updated); !t.IsZero() && !accept.Contains(t) { @@ -759,14 +750,20 @@ func (p *processor) integrity( // Check hashes p.badIntegrities.use() - for _, x := range []struct { + type hash struct { ext string url func() string hash []byte - }{ - {"SHA256", f.SHA256URL, s256.Sum(nil)}, - {"SHA512", f.SHA512URL, s512.Sum(nil)}, - } { + } + hashes := []hash{} + if f.SHA256URL() != "" { + hashes = append(hashes, hash{"SHA256", f.SHA256URL, s256.Sum(nil)}) + } + if f.SHA512URL() != "" { + hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)}) + } + + for _, x := range hashes { hu, err := url.Parse(x.url()) if err != nil { lg(ErrorType, "Bad URL %s: %v", x.url(), err) @@ -918,7 +915,6 @@ func (p *processor) checkIndex(base string, mask whereType) error { // of the fields' values and if they are sorted properly. Then it passes the files to the // "integrity" functions. It returns error if some test fails, otherwise nil. func (p *processor) checkChanges(base string, mask whereType) error { - bu, err := url.Parse(base) if err != nil { return err @@ -978,8 +974,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = - append(times, t), + times, files = append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) } return times, files, nil @@ -1152,7 +1147,6 @@ func (p *processor) checkMissing(string) error { // checkInvalid goes over all found adivisories URLs and checks // if file name conforms to standard. func (p *processor) checkInvalid(string) error { - p.badDirListings.use() var invalids []string @@ -1174,7 +1168,6 @@ func (p *processor) checkInvalid(string) error { // checkListing goes over all found adivisories URLs and checks // if their parent directory is listable. func (p *processor) checkListing(string) error { - p.badDirListings.use() pgs := pages{} @@ -1209,7 +1202,6 @@ func (p *processor) checkListing(string) error { // checkWhitePermissions checks if the TLP:WHITE advisories are // available with unprotected access. 
func (p *processor) checkWhitePermissions(string) error { - var ids []string for id, open := range p.labelChecker.whiteAdvisories { if !open { @@ -1235,7 +1227,6 @@ func (p *processor) checkWhitePermissions(string) error { // According to the result, the respective error messages added to // badProviderMetadata. func (p *processor) checkProviderMetadata(domain string) bool { - p.badProviderMetadata.use() client := p.httpClient() @@ -1282,7 +1273,6 @@ func (p *processor) checkSecurity(domain string, legacy bool) (int, string) { // checkSecurityFolder checks the security.txt in a given folder. func (p *processor) checkSecurityFolder(folder string) string { - client := p.httpClient() path := folder + "security.txt" res, err := client.Get(path) @@ -1349,7 +1339,6 @@ func (p *processor) checkSecurityFolder(folder string) string { // and serves the "provider-metadata.json". // It returns an empty string if all checks are passed, otherwise the errormessage. func (p *processor) checkDNS(domain string) string { - client := p.httpClient() path := "https://csaf.data.security." + domain res, err := client.Get(path) @@ -1359,7 +1348,6 @@ func (p *processor) checkDNS(domain string) string { if res.StatusCode != http.StatusOK { return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", path, res.StatusCode, res.Status) - } hash := sha256.New() defer res.Body.Close() @@ -1378,7 +1366,6 @@ func (p *processor) checkDNS(domain string) string { // available under the /.well-known/csaf/ directory. Returns the errormessage if // an error was encountered, or an empty string otherwise func (p *processor) checkWellknown(domain string) string { - client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" @@ -1408,7 +1395,6 @@ func (p *processor) checkWellknown(domain string) string { // The function returns nil, unless errors outside the checks were found. // In that case, errors are returned. func (p *processor) checkWellknownSecurityDNS(domain string) error { - warningsW := p.checkWellknown(domain) // Security check for well known (default) and legacy location warningsS, sDMessage := p.checkSecurity(domain, false) @@ -1461,7 +1447,6 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { // As a result of these a respective error messages are passed to badPGP method // in case of errors. It returns nil if all checks are passed. func (p *processor) checkPGPKeys(_ string) error { - p.badPGPs.use() src, err := p.expr.Eval("$.public_openpgp_keys", p.pmd) @@ -1520,7 +1505,6 @@ func (p *processor) checkPGPKeys(_ string) error { defer res.Body.Close() return crypto.NewKeyFromArmoredReader(res.Body) }() - if err != nil { p.badPGPs.error("Reading public OpenPGP key %s failed: %v", u, err) continue From f7dc3f5ec74ea8ccada62f64a15cd9d6f9fd8b72 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Sun, 29 Sep 2024 09:08:01 +0200 Subject: [PATCH 102/235] Use .test TLD for integration setup (#577) .local is reserved for local-area networks, and .localhost is reserved for loopback devices. Using .test allows easier usage for different test setups. * https://www.rfc-editor.org/rfc/rfc2606#section-2 defines the "test." top level domain and "localhost.". * https://www.rfc-editor.org/rfc/rfc6761.html#section-6.2 explains how different implementations can use "test.". 
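A side note on patch c0de0c2b above ("Check if hash present, before sending a request"): the checker now only schedules hash comparisons whose companion URL is actually advertised, so no request goes out for a file the feed never listed. A minimal illustration of that pattern with made-up names:

```go
package main

import (
	"crypto/sha256"
	"crypto/sha512"
	"fmt"
	"hash"
)

// hashCheck pairs an advertised companion URL with the digest to verify it.
type hashCheck struct {
	ext string
	url string
	sum hash.Hash
}

// plannedChecks mirrors the conditional slice built in patch c0de0c2b:
// an empty URL means the hash was not listed, so it is simply skipped.
func plannedChecks(sha256URL, sha512URL string) []hashCheck {
	var checks []hashCheck
	if sha256URL != "" {
		checks = append(checks, hashCheck{"SHA256", sha256URL, sha256.New()})
	}
	if sha512URL != "" {
		checks = append(checks, hashCheck{"SHA512", sha512URL, sha512.New()})
	}
	return checks
}

func main() {
	// Only a SHA256 file is advertised here, so only one check is planned.
	for _, c := range plannedChecks("https://example.com/a.json.sha256", "") {
		fmt.Println("would fetch", c.ext, "from", c.url)
	}
}
```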
--- docs/development-ca.md | 2 +- docs/scripts/setupProviderForITest.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/development-ca.md b/docs/development-ca.md index 483732c..21f4ef4 100644 --- a/docs/development-ca.md +++ b/docs/development-ca.md @@ -55,7 +55,7 @@ signing_key encryption_key non_repudiation -dns_name = "*.local" +dns_name = "*.test" dns_name = "localhost" serial = 010 diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index 1a57f1e..f9d7d18 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -17,7 +17,7 @@ sudo chgrp -R www-data /var/www sudo chmod -R g+ws /var/www export NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -export DNS_NAME=csaf.data.security.localhost +export DNS_NAME=csaf.data.security.test sudo cp /usr/share/doc/fcgiwrap/examples/nginx.conf /etc/nginx/fcgiwrap.conf From 18e2e35e7cf0d92d463eaad736074c5c9d43165b Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 29 Oct 2024 09:49:27 +0100 Subject: [PATCH 103/235] Update README.md with link update alert --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 14ac64f..1953854 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,13 @@ Software-Engineering: 2024 Intevation GmbH --> + +> [!IMPORTANT] +> Adjust your links and go module paths if you still have `csaf-poc` in them, to avoid future breakage. +> (This repository was moved here on 2024-10-28. The old one is decrecated +> and redirection will be switched off in a few months.) + + # csaf_distribution Implements a [CSAF](https://csaf.io/) @@ -16,6 +23,7 @@ and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. + ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) is a tool for downloading advisories from a provider. From bdd8aa0a9415da3641cf2624ac0f57381e16b9b2 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 29 Oct 2024 09:50:26 +0100 Subject: [PATCH 104/235] Update README.md --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 1953854..e6ea77f 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,6 @@ and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. Includes an uploader command line tool for the trusted provider. - ## Tools for users ### [csaf_downloader](docs/csaf_downloader.md) is a tool for downloading advisories from a provider. From bf057e2fa8f25e155bb616ebe98523c0f76e5148 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 10:51:38 +0100 Subject: [PATCH 105/235] Update repo move alert in README.md HTML links can be adjusted right now, go module paths will have to wait a bit. --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e6ea77f..53920d8 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,9 @@ > [!IMPORTANT] -> Adjust your links and go module paths if you still have `csaf-poc` in them, to avoid future breakage. +> To avoid future breakage, if you still use `csaf-poc`: +> 1. Adjust your HTML links. +> 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > (This repository was moved here on 2024-10-28. 
The old one is decrecated > and redirection will be switched off in a few months.) From 6ebe7f5f5d3845cc1c9fa26d209de12b4870150a Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 10:53:15 +0100 Subject: [PATCH 106/235] Update repo move alert in README.md use a better phrasing --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 53920d8..f28567e 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > (This repository was moved here on 2024-10-28. The old one is decrecated -> and redirection will be switched off in a few months.) +> and redirection will be switched off a few months later.) # csaf_distribution From 7aa95c03ca1f5a19914cce0158fb3212cab80d19 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 30 Oct 2024 11:03:18 +0100 Subject: [PATCH 107/235] fix: bring aggregator schema to errata01 (#583) --- csaf/schema/aggregator_json_schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/schema/aggregator_json_schema.json b/csaf/schema/aggregator_json_schema.json index 7929f1f..cdad109 100644 --- a/csaf/schema/aggregator_json_schema.json +++ b/csaf/schema/aggregator_json_schema.json @@ -175,7 +175,7 @@ "type": "object", "required": [ "metadata", - "mirror", + "mirrors", "update_interval" ], "properties": { From 1aad5331d2d8d992467e8b5694c43f53dae2d22b Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 30 Oct 2024 11:15:31 +0100 Subject: [PATCH 108/235] Update README.md reformat a bit --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index f28567e..8bdfd88 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ > To avoid future breakage, if you still use `csaf-poc`: > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. +> > (This repository was moved here on 2024-10-28. The old one is decrecated > and redirection will be switched off a few months later.) From 1c860a1ab21692f176ecc033fc484dcebc9f5728 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Wed, 30 Oct 2024 11:22:24 +0100 Subject: [PATCH 109/235] Update README.md: Fix: typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8bdfd88..568bf03 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ > 1. Adjust your HTML links. > 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. > -> (This repository was moved here on 2024-10-28. The old one is decrecated +> (This repository was moved here on 2024-10-28. The old one is deprecated > and redirection will be switched off a few months later.) From ffadad38c6cc9aa9b29af2489ea4487d676e0f34 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 30 Oct 2024 15:53:22 +0100 Subject: [PATCH 110/235] improve test setupscript by adding missing zip Add zip as packages to be installed in preparation as the `make dist` target uses it. 
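Another aside, on the schema fix in patch 7aa95c03 above: the required key is `mirrors` (plural), matching errata01. A tiny decoding sketch that would catch a document still using the old singular spelling; the `providerEntry` struct and the URL are illustrative, not the repository's model types:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// providerEntry models just enough of an aggregator entry to show the
// required "mirrors" key.
type providerEntry struct {
	Mirrors []string `json:"mirrors"`
}

func main() {
	raw := []byte(`{"mirror": ["https://mirror.example.com/"]}`) // old, misspelled key
	var e providerEntry
	if err := json.Unmarshal(raw, &e); err != nil {
		panic(err)
	}
	if len(e.Mirrors) == 0 {
		fmt.Println("missing required mirrors list")
		return
	}
	fmt.Println("first mirror:", e.Mirrors[0])
}
```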
--- docs/scripts/prepareUbuntuInstanceForITests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scripts/prepareUbuntuInstanceForITests.sh b/docs/scripts/prepareUbuntuInstanceForITests.sh index ea88fc4..f124044 100755 --- a/docs/scripts/prepareUbuntuInstanceForITests.sh +++ b/docs/scripts/prepareUbuntuInstanceForITests.sh @@ -6,7 +6,7 @@ set -e # by installing the required packages. apt update -apt install -y make bash curl gnupg sed tar git nginx fcgiwrap gnutls-bin +apt install -y make bash curl gnupg sed tar git nginx fcgiwrap gnutls-bin zip # Install Go from binary distribution latest_go="$(curl https://go.dev/VERSION\?m=text| head -1).linux-amd64.tar.gz" From e8706e5eb99d40f464587c6d3aba2e2484a3fd6a Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 30 Oct 2024 14:46:50 +0100 Subject: [PATCH 111/235] feat: perform go path repo move * Change the go module path from github.com/csaf-poc/csaf_distribution to github.com/gocsaf/csaf. * Rename archive for release tarballs. * Adjust testing scripts and documentation. --- .github/workflows/itest.yml | 2 +- .github/workflows/release.yml | 4 ++-- Makefile | 6 +++--- README.md | 10 +++++----- cmd/csaf_aggregator/client.go | 2 +- cmd/csaf_aggregator/config.go | 12 ++++++------ cmd/csaf_aggregator/full.go | 4 ++-- cmd/csaf_aggregator/indices.go | 4 ++-- cmd/csaf_aggregator/interim.go | 4 ++-- cmd/csaf_aggregator/lazytransaction.go | 2 +- cmd/csaf_aggregator/lister.go | 4 ++-- cmd/csaf_aggregator/main.go | 2 +- cmd/csaf_aggregator/mirror.go | 4 ++-- cmd/csaf_aggregator/processor.go | 4 ++-- cmd/csaf_checker/config.go | 8 ++++---- cmd/csaf_checker/links.go | 2 +- cmd/csaf_checker/main.go | 2 +- cmd/csaf_checker/processor.go | 4 ++-- cmd/csaf_checker/report.go | 4 ++-- cmd/csaf_checker/reporters.go | 2 +- cmd/csaf_checker/roliecheck.go | 4 ++-- cmd/csaf_checker/rules.go | 2 +- cmd/csaf_downloader/config.go | 8 ++++---- cmd/csaf_downloader/downloader.go | 4 ++-- cmd/csaf_downloader/forwarder.go | 4 ++-- cmd/csaf_downloader/forwarder_test.go | 4 ++-- cmd/csaf_downloader/main.go | 2 +- cmd/csaf_provider/actions.go | 4 ++-- cmd/csaf_provider/config.go | 2 +- cmd/csaf_provider/create.go | 4 ++-- cmd/csaf_provider/files.go | 2 +- cmd/csaf_provider/indices.go | 2 +- cmd/csaf_provider/main.go | 2 +- cmd/csaf_provider/rolie.go | 4 ++-- cmd/csaf_provider/transaction.go | 4 ++-- cmd/csaf_uploader/config.go | 4 ++-- cmd/csaf_uploader/main.go | 2 +- cmd/csaf_uploader/processor.go | 6 +++--- cmd/csaf_validator/main.go | 4 ++-- csaf/advisories.go | 2 +- csaf/models.go | 2 +- csaf/providermetaloader.go | 2 +- csaf/rolie.go | 2 +- csaf/summary.go | 2 +- docs/csaf_checker.md | 2 +- docs/csaf_provider.md | 4 ++-- docs/provider-setup.md | 2 +- docs/scripts/Readme.md | 8 ++++---- docs/scripts/TLSClientConfigsForITest.sh | 2 +- docs/scripts/TLSConfigsForITest.sh | 2 +- docs/scripts/prepareUbuntuInstanceForITests.sh | 2 +- docs/scripts/testAggregator.sh | 2 +- docs/scripts/testChecker.sh | 2 +- docs/scripts/testDownloader.sh | 2 +- examples/README.md | 2 +- examples/purls_searcher/main.go | 6 +++--- go.mod | 2 +- internal/options/options.go | 2 +- 58 files changed, 102 insertions(+), 102 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 364c330..9cc4c6b 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -25,7 +25,7 @@ jobs: sudo apt install -y make nginx fcgiwrap gnutls-bin cp -r $GITHUB_WORKSPACE ~ cd ~ - cd csaf_distribution/docs/scripts/ + cd csaf/docs/scripts/ # keep in 
sync with docs/scripts/Readme.md export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" source ./TLSConfigsForITest.sh diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 739f45c..4bcd6ba 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -24,5 +24,5 @@ jobs: uses: softprops/action-gh-release@v1 with: files: | - dist/csaf_distribution-*.zip - dist/csaf_distribution-*.tar.gz + dist/csaf-*.zip + dist/csaf-*.tar.gz diff --git a/Makefile b/Makefile index b4b3964..083d3b6 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ # SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) # Software-Engineering: 2021 Intevation GmbH # -# Makefile to build csaf_distribution components +# Makefile to build csaf components SHELL = /bin/bash BUILD = go build @@ -59,7 +59,7 @@ testsemver: # Set -ldflags parameter to pass the semversion. -LDFLAGS = -ldflags "-X github.com/csaf-poc/csaf_distribution/v3/util.SemVersion=$(SEMVER)" +LDFLAGS = -ldflags "-X github.com/gocsaf/csaf/v3/util.SemVersion=$(SEMVER)" # Build binaries and place them under bin-$(GOOS)-$(GOARCH) # Using 'Target-specific Variable Values' to specify the build target system @@ -78,7 +78,7 @@ build_linux build_win build_mac_amd64 build_mac_arm64: env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/... -DISTDIR := csaf_distribution-$(SEMVER) +DISTDIR := csaf-$(SEMVER) dist: build_linux build_win build_mac_amd64 build_mac_arm64 mkdir -p dist mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 diff --git a/README.md b/README.md index 568bf03..cec9248 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ > and redirection will be switched off a few months later.) -# csaf_distribution +# csaf Implements a [CSAF](https://csaf.io/) ([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) @@ -52,10 +52,10 @@ is a CSAF Aggregator, to list or mirror providers. ## Other stuff ### [examples](./examples/README.md) -are small examples of how to use `github.com/csaf-poc/csaf_distribution` +are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress, as usage of this repository as a library to access is _not officially supported_, e.g. -see https://github.com/csaf-poc/csaf_distribution/issues/367 . +see https://github.com/gocsaf/csaf/issues/367 . ## Setup Binaries for the server side are only available and tested @@ -81,7 +81,7 @@ Download the binaries from the most recent release assets on Github. - A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) -- Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git ` +- Clone the repository `git clone https://github.com/gocsaf/csaf.git ` - Build Go components Makefile supplies the following targets: - Build for GNU/Linux system: `make build_linux` @@ -110,7 +110,7 @@ For further details of the development process consult our [development page](./ ## License -- `csaf_distribution` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). +- `csaf` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt). - See the specific source files for details, the license itself can be found in the directory `LICENSES/`. 
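For external code that imports this project as a library, the rename in this patch comes down to swapping the module path in `go.mod` and in the import statements; a minimal before/after sketch with a made-up advisory URL:

```go
package main

import (
	"fmt"

	// before this patch: "github.com/csaf-poc/csaf_distribution/v3/csaf"
	"github.com/gocsaf/csaf/v3/csaf"
)

func main() {
	// Exported identifiers stay the same; only the module path moves.
	f := csaf.DirectoryAdvisoryFile{Path: "https://example.com/advisory.json"}
	fmt.Println(f.SHA256URL())
}
```

Binaries built from this repository are not affected; the move only matters for modules that import these packages, which is what the README alert above warns about.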
diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index 8200d34..916baa5 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -13,7 +13,7 @@ import ( "io" "net/http" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) var errNotFound = errors.New("not found") diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index b73286c..81db0b7 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -20,12 +20,12 @@ import ( "time" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" "golang.org/x/time/rate" ) diff --git a/cmd/csaf_aggregator/full.go b/cmd/csaf_aggregator/full.go index 9ec9812..e71d7b6 100644 --- a/cmd/csaf_aggregator/full.go +++ b/cmd/csaf_aggregator/full.go @@ -18,8 +18,8 @@ import ( "sync" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type fullJob struct { diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 272d25b..17c8d3a 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -19,8 +19,8 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) const ( diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index 023c9c4..94147bc 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -24,8 +24,8 @@ import ( "sync" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type interimJob struct { diff --git a/cmd/csaf_aggregator/lazytransaction.go b/cmd/csaf_aggregator/lazytransaction.go index 606d892..af36ee2 100644 --- a/cmd/csaf_aggregator/lazytransaction.go +++ b/cmd/csaf_aggregator/lazytransaction.go @@ -13,7 +13,7 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type lazyTransaction struct { diff --git a/cmd/csaf_aggregator/lister.go b/cmd/csaf_aggregator/lister.go index 4d758e4..7e1fb58 100644 --- a/cmd/csaf_aggregator/lister.go +++ b/cmd/csaf_aggregator/lister.go @@ -11,8 +11,8 @@ package main import ( "fmt" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mirrorAllowed checks if mirroring is allowed. 
diff --git a/cmd/csaf_aggregator/main.go b/cmd/csaf_aggregator/main.go index 39c1051..2056e84 100644 --- a/cmd/csaf_aggregator/main.go +++ b/cmd/csaf_aggregator/main.go @@ -15,7 +15,7 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" "github.com/gofrs/flock" ) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 6bf72a3..c90ef68 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -30,8 +30,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mirrorAllowed checks if mirroring is allowed. diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index 5cb3628..b22e839 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -14,8 +14,8 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" "github.com/ProtonMail/gopenpgp/v2/crypto" ) diff --git a/cmd/csaf_checker/config.go b/cmd/csaf_checker/config.go index ac9ce62..3ea1840 100644 --- a/cmd/csaf_checker/config.go +++ b/cmd/csaf_checker/config.go @@ -13,10 +13,10 @@ import ( "fmt" "net/http" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" ) type outputFormat string diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index 0456ace..a323661 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -15,7 +15,7 @@ import ( "github.com/PuerkitoBio/goquery" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type ( diff --git a/cmd/csaf_checker/main.go b/cmd/csaf_checker/main.go index 752fdf8..4efb351 100644 --- a/cmd/csaf_checker/main.go +++ b/cmd/csaf_checker/main.go @@ -12,7 +12,7 @@ package main import ( "log" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" ) // run uses a processor to check all the given domains or direct urls diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index da4214b..5fd3fbd 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -32,8 +32,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // topicMessages stores the collected topicMessages for a specific topic. 
diff --git a/cmd/csaf_checker/report.go b/cmd/csaf_checker/report.go index 9b5251b..58ed25a 100644 --- a/cmd/csaf_checker/report.go +++ b/cmd/csaf_checker/report.go @@ -18,8 +18,8 @@ import ( "os" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/models" ) // MessageType is the kind of the message. diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 016d371..157eabe 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -13,7 +13,7 @@ import ( "sort" "strings" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type ( diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 53d1150..28bd437 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -15,8 +15,8 @@ import ( "sort" "strings" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // identifier consist of document/tracking/id and document/publisher/namespace, diff --git a/cmd/csaf_checker/rules.go b/cmd/csaf_checker/rules.go index eadbbb2..e04388d 100644 --- a/cmd/csaf_checker/rules.go +++ b/cmd/csaf_checker/rules.go @@ -12,7 +12,7 @@ import ( "fmt" "sort" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/gocsaf/csaf/v3/csaf" ) type ruleCondition int diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index dcfc090..33f8dc2 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -19,10 +19,10 @@ import ( "path/filepath" "time" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/filter" - "github.com/csaf-poc/csaf_distribution/v3/internal/models" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/filter" + "github.com/gocsaf/csaf/v3/internal/models" + "github.com/gocsaf/csaf/v3/internal/options" ) const ( diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index fde4cd3..f21fcc0 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -33,8 +33,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type downloader struct { diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index c3681eb..12d9fe4 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -19,8 +19,8 @@ import ( "path/filepath" "strings" - "github.com/csaf-poc/csaf_distribution/v3/internal/misc" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/gocsaf/csaf/v3/util" ) // failedForwardDir is the name of the special sub folder diff --git a/cmd/csaf_downloader/forwarder_test.go b/cmd/csaf_downloader/forwarder_test.go index 907bbce..25f0f1f 100644 --- a/cmd/csaf_downloader/forwarder_test.go +++ b/cmd/csaf_downloader/forwarder_test.go @@ -23,8 +23,8 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" 
+ "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" ) func TestValidationStatusUpdate(t *testing.T) { diff --git a/cmd/csaf_downloader/main.go b/cmd/csaf_downloader/main.go index cc284bb..fe6efd1 100644 --- a/cmd/csaf_downloader/main.go +++ b/cmd/csaf_downloader/main.go @@ -15,7 +15,7 @@ import ( "os" "os/signal" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/options" ) func run(cfg *config, domains []string) error { diff --git a/cmd/csaf_provider/actions.go b/cmd/csaf_provider/actions.go index 8f385e6..1862983 100644 --- a/cmd/csaf_provider/actions.go +++ b/cmd/csaf_provider/actions.go @@ -26,8 +26,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) const dateFormat = time.RFC3339 diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index 49a7204..826b7bf 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -18,7 +18,7 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/crypto/bcrypt" - "github.com/csaf-poc/csaf_distribution/v3/csaf" + "github.com/gocsaf/csaf/v3/csaf" ) const ( diff --git a/cmd/csaf_provider/create.go b/cmd/csaf_provider/create.go index 56893c6..11e0b7c 100644 --- a/cmd/csaf_provider/create.go +++ b/cmd/csaf_provider/create.go @@ -22,8 +22,8 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // ensureFolders initializes the paths and call functions to create diff --git a/cmd/csaf_provider/files.go b/cmd/csaf_provider/files.go index 39a97e3..3b99ff5 100644 --- a/cmd/csaf_provider/files.go +++ b/cmd/csaf_provider/files.go @@ -13,7 +13,7 @@ import ( "crypto/sha512" "os" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func writeHashedFile(fname, name string, data []byte, armored string) error { diff --git a/cmd/csaf_provider/indices.go b/cmd/csaf_provider/indices.go index 805371b..a4eb97a 100644 --- a/cmd/csaf_provider/indices.go +++ b/cmd/csaf_provider/indices.go @@ -18,7 +18,7 @@ import ( "sort" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func updateIndex(dir, fname string) error { diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 8740e81..6c858c9 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -18,7 +18,7 @@ import ( "github.com/jessevdk/go-flags" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) type options struct { diff --git a/cmd/csaf_provider/rolie.go b/cmd/csaf_provider/rolie.go index 98448bd..d9717b1 100644 --- a/cmd/csaf_provider/rolie.go +++ b/cmd/csaf_provider/rolie.go @@ -15,8 +15,8 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) // mergeCategories merges the given categories into the old ones. 
diff --git a/cmd/csaf_provider/transaction.go b/cmd/csaf_provider/transaction.go index 1b66ae0..c4c93a8 100644 --- a/cmd/csaf_provider/transaction.go +++ b/cmd/csaf_provider/transaction.go @@ -12,8 +12,8 @@ import ( "os" "path/filepath" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) func doTransaction( diff --git a/cmd/csaf_uploader/config.go b/cmd/csaf_uploader/config.go index a83361c..ceecff7 100644 --- a/cmd/csaf_uploader/config.go +++ b/cmd/csaf_uploader/config.go @@ -18,8 +18,8 @@ import ( "golang.org/x/crypto/bcrypt" "golang.org/x/term" - "github.com/csaf-poc/csaf_distribution/v3/internal/certs" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/certs" + "github.com/gocsaf/csaf/v3/internal/options" ) const ( diff --git a/cmd/csaf_uploader/main.go b/cmd/csaf_uploader/main.go index 20f89fd..db1cef4 100644 --- a/cmd/csaf_uploader/main.go +++ b/cmd/csaf_uploader/main.go @@ -9,7 +9,7 @@ // Implements a command line tool that uploads csaf documents to csaf_provider. package main -import "github.com/csaf-poc/csaf_distribution/v3/internal/options" +import "github.com/gocsaf/csaf/v3/internal/options" func main() { args, cfg, err := parseArgsConfig() diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index 4598865..b57cafb 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -26,9 +26,9 @@ import ( "github.com/ProtonMail/gopenpgp/v2/constants" "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/internal/misc" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/gocsaf/csaf/v3/util" ) type processor struct { diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index f6aecc4..b07c2f4 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -18,8 +18,8 @@ import ( "github.com/jessevdk/go-flags" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) type options struct { diff --git a/csaf/advisories.go b/csaf/advisories.go index 6f07648..c51c84c 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -19,7 +19,7 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // AdvisoryFile constructs the urls of a remote file. diff --git a/csaf/models.go b/csaf/models.go index c7e507d..c4b132d 100644 --- a/csaf/models.go +++ b/csaf/models.go @@ -17,7 +17,7 @@ import ( "strings" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // TLPLabel is the traffic light policy of the CSAF. 
diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index b21ddc6..b28b606 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -18,7 +18,7 @@ import ( "net/http" "strings" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // ProviderMetadataLoader helps load provider-metadata.json from diff --git a/csaf/rolie.go b/csaf/rolie.go index c2b5b08..b94cfa3 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -14,7 +14,7 @@ import ( "sort" "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) // ROLIEServiceWorkspaceCollectionCategoriesCategory is a category in a ROLIE service collection. diff --git a/csaf/summary.go b/csaf/summary.go index 72d2faf..b10dd65 100644 --- a/csaf/summary.go +++ b/csaf/summary.go @@ -11,7 +11,7 @@ package csaf import ( "time" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) const ( diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 0b223b6..5152501 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -93,7 +93,7 @@ ignorepattern = [".*white.*", ".*red.*"] The `role` given in the `provider-metadata.json` is not yet considered to change the overall result, -see . +see . If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected. To check these advisories, authorization can be given via custom headers or certificates. diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index 81a45fa..b88924d 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -141,5 +141,5 @@ contact_details = "Example Company can be reached at contact_us@example.com, or There is an experimental upload interface which works with a web browser. It is disabled by default, as there are known issues, notably: - * https://github.com/csaf-poc/csaf_distribution/issues/43 - * https://github.com/csaf-poc/csaf_distribution/issues/256 + * https://github.com/gocsaf/csaf/issues/43 + * https://github.com/gocsaf/csaf/issues/256 diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 3f07fd0..48c29d0 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -115,7 +115,7 @@ sudo chmod g+r,o-rwx /etc/csaf/config.toml Here is a minimal example configuration, which you need to customize for a production setup, -see the [options of `csaf_provider`](https://github.com/csaf-poc/csaf_distribution/blob/main/docs/csaf_provider.md). +see the [options of `csaf_provider`](https://github.com/gocsaf/csaf/blob/main/docs/csaf_provider.md). diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index 95f39b2..77e8dae 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -1,7 +1,7 @@ Scripts for assisting the Integration tests. They were written on Ubuntu 20.04 TLS amd64 and also tested with 24.04 TLS. -- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf_distribution integration tests on a naked ubuntu LTS amd64. +- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf integration tests on a naked Ubuntu LTS amd64. - `TLSConfigsForITest.sh` generates a root CA and webserver cert by running `createRootCAForITest.sh` and `createWebserverCertForITest.sh` and configures nginx for serving TLS connections. 
@@ -14,11 +14,11 @@ As creating the folders needs to authenticate with the csaf_provider, the config Calling example (as user with sudo privileges): ``` bash - curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh + curl --fail -O https://raw.githubusercontent.com/gocsaf/csaf/main/docs/scripts/prepareUbuntuInstanceForITests.sh sudo bash prepareUbuntuInstanceForITests.sh - git clone https://github.com/csaf-poc/csaf_distribution.git # --branch - pushd csaf_distribution/docs/scripts/ + git clone https://github.com/gocsaf/csaf.git # --branch + pushd csaf/docs/scripts/ export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)" source ./TLSConfigsForITest.sh diff --git a/docs/scripts/TLSClientConfigsForITest.sh b/docs/scripts/TLSClientConfigsForITest.sh index 1f94117..830666f 100755 --- a/docs/scripts/TLSClientConfigsForITest.sh +++ b/docs/scripts/TLSClientConfigsForITest.sh @@ -18,7 +18,7 @@ set -e NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -cd ~/csaf_distribution/docs/scripts/ +cd ~/csaf/docs/scripts/ source ./createCCForITest.sh echo ' diff --git a/docs/scripts/TLSConfigsForITest.sh b/docs/scripts/TLSConfigsForITest.sh index c1a5420..d7c06f9 100644 --- a/docs/scripts/TLSConfigsForITest.sh +++ b/docs/scripts/TLSConfigsForITest.sh @@ -17,7 +17,7 @@ set -e NGINX_CONFIG_PATH=/etc/nginx/sites-available/default -cd ~/csaf_distribution/docs/scripts/ +cd ~/csaf/docs/scripts/ ## Create Root CA ./createRootCAForITest.sh diff --git a/docs/scripts/prepareUbuntuInstanceForITests.sh b/docs/scripts/prepareUbuntuInstanceForITests.sh index ea88fc4..75ce44b 100755 --- a/docs/scripts/prepareUbuntuInstanceForITests.sh +++ b/docs/scripts/prepareUbuntuInstanceForITests.sh @@ -2,7 +2,7 @@ set -e # This script prepares a naked Ubuntu LTS amd64 -# for the csaf_distribution integration tests +# for the csaf integration tests # by installing the required packages. apt update diff --git a/docs/scripts/testAggregator.sh b/docs/scripts/testAggregator.sh index 366ac07..f6322f6 100755 --- a/docs/scripts/testAggregator.sh +++ b/docs/scripts/testAggregator.sh @@ -29,6 +29,6 @@ popd echo echo '=== run aggregator' -cd ~/csaf_distribution/ +cd ~/csaf/ sudo cp docs/examples/aggregator.toml /etc/csaf sudo ./bin-linux-amd64/csaf_aggregator -c /etc/csaf/aggregator.toml diff --git a/docs/scripts/testChecker.sh b/docs/scripts/testChecker.sh index cb45aad..28474d0 100755 --- a/docs/scripts/testChecker.sh +++ b/docs/scripts/testChecker.sh @@ -11,7 +11,7 @@ set -e # to exit if a command in the script fails echo '==== run checker (twice)' -cd ~/csaf_distribution +cd ~/csaf ./bin-linux-amd64/csaf_checker -f html -o ../checker-results.html --insecure \ --client_cert ~/devca1/testclient1.crt \ diff --git a/docs/scripts/testDownloader.sh b/docs/scripts/testDownloader.sh index c4b9bce..6326536 100755 --- a/docs/scripts/testDownloader.sh +++ b/docs/scripts/testDownloader.sh @@ -10,7 +10,7 @@ set -e # to exit if a command in the script fails -cd ~/csaf_distribution +cd ~/csaf echo echo '==== run downloader (1)' diff --git a/examples/README.md b/examples/README.md index a70ea09..c525e96 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,6 +1,6 @@ # API examples -An experimental example of how to use `github.com/csaf-poc/csaf_distribution` +An experimental example of how to use `github.com/gocsaf/csaf` as a library. As usage of the repository as an API is currently a _work in progress_, these examples are likely to be changed. 
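For code that consumes this repository as a library, the rename in this patch amounts to a one-line change per import plus the matching module path in `go.mod` (`github.com/gocsaf/csaf/v3` instead of `github.com/csaf-poc/csaf_distribution/v3`). A minimal, hypothetical consumer sketch — the import path is taken from the diffs in this patch, while the `TLPLabel` usage is only an assumption based on its description in `csaf/models.go`:

```go
// Hypothetical downstream program.
// old import: github.com/csaf-poc/csaf_distribution/v3/csaf
// new import: github.com/gocsaf/csaf/v3/csaf
package main

import (
	"fmt"

	"github.com/gocsaf/csaf/v3/csaf"
)

func main() {
	// Assumption: TLPLabel is a string-based type; the value is illustrative.
	var label csaf.TLPLabel = "WHITE"
	fmt.Println("label:", label)
}
```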
diff --git a/examples/purls_searcher/main.go b/examples/purls_searcher/main.go index c1ec3e1..72fb976 100644 --- a/examples/purls_searcher/main.go +++ b/examples/purls_searcher/main.go @@ -1,5 +1,5 @@ // Package main implements a simple demo program to -// work with the csaf_distribution library. +// work with the csaf library. package main import ( @@ -9,8 +9,8 @@ import ( "os" "strings" - "github.com/csaf-poc/csaf_distribution/v3/csaf" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/util" ) func main() { diff --git a/go.mod b/go.mod index 52f1f02..c8101f0 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/csaf-poc/csaf_distribution/v3 +module github.com/gocsaf/csaf/v3 go 1.22 diff --git a/internal/options/options.go b/internal/options/options.go index c0ad2bc..3a4867f 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -15,7 +15,7 @@ import ( "log/slog" "os" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" "github.com/BurntSushi/toml" "github.com/jessevdk/go-flags" From ace8aeaf985517cca2d3ba4b4a17db4e0f048021 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 15 Nov 2024 11:46:26 +0100 Subject: [PATCH 112/235] fix: build-in version for release tags * Change Makefile to remove the leading `v` from the git tag in the case of release tags. Previously this was only done for pre-release git tags. --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 083d3b6..04ec866 100644 --- a/Makefile +++ b/Makefile @@ -47,13 +47,13 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always) -GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') +GITDESC := $(shell git describe --tags --always | sed -E 's/^v//') +GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) # Hint: The regexp in the next line only matches if there is a hyphen (`-`) # followed by a number, by which we assume that git describe # has added a string after the tag -SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/v?([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From 1e3504c7539fd6dac3e7ffdb2c35cb1111153299 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 15 Nov 2024 12:12:24 +0100 Subject: [PATCH 113/235] improve Makefile improvement --- Makefile | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 04ec866..163ace5 100644 --- a/Makefile +++ b/Makefile @@ -41,19 +41,19 @@ tag_checked_out: # into a semver version. For this we increase the PATCH number, so that # any commit after a tag is considered newer than the semver from the tag # without an optional 'v' -# Note we need `--tags` because github release only creates lightweight tags +# Note we need `--tags` because github releases only create lightweight tags # (see feature request https://github.com/github/feedback/discussions/4924). # We use `--always` in case of being run as github action with shallow clone. 
# In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always | sed -E 's/^v//') -GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') +GITDESC := $(shell git describe --tags --always) +GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) -# Hint: The regexp in the next line only matches if there is a hyphen (`-`) -# followed by a number, by which we assume that git describe -# has added a string after the tag -SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +# Hint: The second regexp in the next line only matches +# if there is a hyphen (`-`) followed by a number, +# by which we assume that git describe has added a string after the tag +SEMVER := $(shell echo '$(GITDESC)' | sed -E -e 's/^v//' -e 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From c00dc36547e433f52d6dbcbf5345d6cc534c2d8a Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:31:58 +0100 Subject: [PATCH 114/235] Remove `-h` for preferred hash configuration This option was in conflict with the help display. --- cmd/csaf_downloader/config.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 71c5055..619cce1 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -88,7 +88,7 @@ type config struct { ignorePattern filter.PatternMatcher //lint:ignore SA5008 We are using choice or than once: sha256, sha512 - PreferredHash hashAlgorithm `long:"preferred_hash" short:"h" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` + PreferredHash hashAlgorithm `long:"preferred_hash" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"` } // configPaths are the potential file locations of the config file. 
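The two Makefile patches above derive the version string that is linked into the binaries from `git describe`: a leading `v` is stripped, and when `git describe` has appended `-<n>-g<hash>` after the tag, the patch number is incremented so that such builds sort after the released tag. Below is a rough Go re-implementation of that sed pipeline, for illustration only (the build keeps using the Makefile; the regular expression is adapted from the SEMVER rule):

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"
)

// deriveSemver approximates the Makefile logic: drop a leading "v" and, if
// `git describe` added a "-<n>-g<hash>" suffix after the tag, increment the
// patch number so the derived version is newer than the tagged release.
func deriveSemver(gitDesc string) string {
	desc := strings.TrimPrefix(gitDesc, "v")
	re := regexp.MustCompile(`^([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)$`)
	m := re.FindStringSubmatch(desc)
	if m == nil {
		return desc // exactly on a release tag, e.g. "3.1.2"
	}
	patch, _ := strconv.Atoi(m[2])
	return m[1] + strconv.Itoa(patch+1) + m[3]
}

func main() {
	fmt.Println(deriveSemver("v3.1.2"))            // 3.1.2
	fmt.Println(deriveSemver("v3.1.2-5-g2b55bbb")) // 3.1.3-5-g2b55bbb
}
```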
From de047b76829f898ba9e22be99ca384dc0ddc7563 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 21 Nov 2024 12:53:07 +0100 Subject: [PATCH 115/235] Feat: Add prefered hash to downloader docs --- docs/csaf_downloader.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index fcf6634..6335366 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -32,6 +32,7 @@ Application Options: --logfile=FILE FILE to log downloading to (default: downloader.log) --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file + --preferred_hash=HASH[sha256|sha512] HASH to prefer Help Options: -h, --help Show this help message From 01645f55598e01e891c1a146eda6b9817b2e9c9c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 21 Nov 2024 12:49:02 +0100 Subject: [PATCH 116/235] Fix: Update downloader docs --- docs/csaf_downloader.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index 07c6e63..04f93b2 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -22,6 +22,7 @@ Application Options: -f, --folder=FOLDER Download into a given subFOLDER -i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs -H, --header= One or more extra HTTP header fields + --enumerate_pmd_only If this flag is set to true, the downloader will only enumerate valid provider metadata files, but not download documents --validator=URL URL to validate documents remotely --validator_cache=FILE FILE to cache remote validations --validator_preset=PRESETS One or more PRESETS to validate remotely (default: [mandatory]) @@ -30,8 +31,8 @@ Application Options: --forward_header= One or more extra HTTP header fields used by forwarding --forward_queue=LENGTH Maximal queue LENGTH before forwarder (default: 5) --forward_insecure Do not check TLS certificates from forward endpoint - --logfile=FILE FILE to log downloading to (default: downloader.log) - --loglevel=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) + --log_file=FILE FILE to log downloading to (default: downloader.log) + --log_level=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info) -c, --config=TOML-FILE Path to config TOML file Help Options: From fe4f01d06255e67db2c5ee3f6f3e9a1453b2dea0 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:52:56 +0100 Subject: [PATCH 117/235] fix: Link to file was not working (#592) --- docs/csaf_provider.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index b88924d..cb27f9f 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -4,7 +4,7 @@ The [setup docs](../README.md#setup-trusted-provider) explain how to wire this up with nginx and where the config file lives. 
When installed, two endpoints are offered, -and you should use the [csaf_uploader](../docs/csaf_uploader) +and you should use the [csaf_uploader](../docs/csaf_uploader.md) to access them: ### /api/create From f6d7589fde4b7208572d6a0781dd0624ecbbe582 Mon Sep 17 00:00:00 2001 From: koplas <54645365+koplas@users.noreply.github.com> Date: Fri, 22 Nov 2024 15:58:41 +0100 Subject: [PATCH 118/235] Add required upload permissions --- .github/workflows/release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4bcd6ba..d1e370f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,6 +8,8 @@ jobs: releases-matrix: name: Release Go binaries runs-on: ubuntu-20.04 + permissions: + contents: write steps: - name: Checkout uses: actions/checkout@v3 From 9495d8b1c38ac814f10fd29762e509ed849203db Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 22 Nov 2024 16:10:54 +0100 Subject: [PATCH 119/235] Update Go 3rd party libs --- go.mod | 30 +++++++++++----------- go.sum | 80 +++++++++++++++++++++++----------------------------------- 2 files changed, 47 insertions(+), 63 deletions(-) diff --git a/go.mod b/go.mod index c8101f0..1ef2216 100644 --- a/go.mod +++ b/go.mod @@ -1,31 +1,31 @@ module github.com/gocsaf/csaf/v3 -go 1.22 +go 1.22.9 require ( - github.com/BurntSushi/toml v1.3.2 + github.com/BurntSushi/toml v1.4.0 github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 - github.com/ProtonMail/gopenpgp/v2 v2.7.4 + github.com/ProtonMail/gopenpgp/v2 v2.8.0 github.com/PuerkitoBio/goquery v1.8.1 - github.com/gofrs/flock v0.8.1 - github.com/jessevdk/go-flags v1.5.0 + github.com/gofrs/flock v0.12.1 + github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 - go.etcd.io/bbolt v1.3.8 - golang.org/x/crypto v0.14.0 - golang.org/x/term v0.13.0 - golang.org/x/time v0.3.0 + go.etcd.io/bbolt v1.3.11 + golang.org/x/crypto v0.29.0 + golang.org/x/term v0.26.0 + golang.org/x/time v0.8.0 ) require ( - github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect + github.com/ProtonMail/go-crypto v1.1.2 // indirect github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect github.com/andybalholm/cascadia v1.3.2 // indirect - github.com/cloudflare/circl v1.3.6 // indirect + github.com/cloudflare/circl v1.5.0 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/shopspring/decimal v1.3.1 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/sys v0.14.0 // indirect - golang.org/x/text v0.13.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect ) diff --git a/go.sum b/go.sum index f81653d..47637e9 100644 --- a/go.sum +++ b/go.sum @@ -1,34 +1,30 @@ -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= +github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/Intevation/gval v1.3.0 h1:+Ze5sft5MmGbZrHj06NVUbcxCb67l9RaPTLMNr37mjw= github.com/Intevation/gval v1.3.0/go.mod h1:xmGyGpP5be12EL0P12h+dqiYG8qn2j3PJxIgkoOHO5o= github.com/Intevation/jsonpath v0.2.1 h1:rINNQJ0Pts5XTFEG+zamtdL7l9uuE1z0FBA+r55Sw+A= 
github.com/Intevation/jsonpath v0.2.1/go.mod h1:WnZ8weMmwAx/fAO3SutjYFU+v7DFreNYnibV7CiaYIw= -github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= -github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= -github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-crypto v1.1.2 h1:A7JbD57ThNqh7XjmHE+PXpQ3Dqt3BrSAC0AL0Go3KS0= +github.com/ProtonMail/go-crypto v1.1.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= -github.com/ProtonMail/gopenpgp/v2 v2.7.4 h1:Vz/8+HViFFnf2A6XX8JOvZMrA6F5puwNvvF21O1mRlo= -github.com/ProtonMail/gopenpgp/v2 v2.7.4/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= +github.com/ProtonMail/gopenpgp/v2 v2.8.0 h1:WvMv3CMcFsqKSM4/Qf8sf3tgyQkzDqQmoSE49bnBuP4= +github.com/ProtonMail/gopenpgp/v2 v2.8.0/go.mod h1:qb2GUSnmA9ipBW5GVtCtEhkummSlqs2A8Ar3S0HBgSY= github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= -github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= -github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= -github.com/cloudflare/circl v1.3.6 h1:/xbKIqSHbZXHwkhbrhrt2YOHIwYJlXH94E3tI/gDlUg= -github.com/cloudflare/circl v1.3.6/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= +github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= +github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= +github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/pkg/errors 
v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -37,78 +33,66 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= +go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.17.0 
h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text 
v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= +golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From 7824f3b48da9c868940936b3839483d15feaf8f3 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 22 Nov 2024 16:31:56 +0100 Subject: [PATCH 120/235] Improve hash fetching and logging --- cmd/csaf_downloader/config.go | 4 +- cmd/csaf_downloader/downloader.go | 108 +++++++++++++++++++++++------- 2 files changed, 84 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index 619cce1..a262ef7 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -44,8 +44,8 @@ const ( type hashAlgorithm string const ( - algSha256 = hashAlgorithm("SHA256") - algSha2512 = hashAlgorithm("SHA512") + algSha256 = hashAlgorithm("SHA256") + algSha512 = hashAlgorithm("SHA512") ) type config struct { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3cb7332..18fc1e8 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -25,6 +25,7 @@ import ( "os" "path" "path/filepath" + "slices" "strconv" "strings" "sync" @@ -37,6 +38,13 @@ import ( "github.com/csaf-poc/csaf_distribution/v3/util" ) +type hashFetchInfo struct { + url string + preferred bool + warn bool + hashType hashAlgorithm +} + type downloader struct { cfg *config keys *crypto.KeyRing @@ -496,35 +504,39 @@ nextAdvisory: signData []byte ) - if (d.cfg.PreferredHash != "sha512" || file.SHA512URL() == "") && file.SHA256URL() != "" { - // Only hash when we have a remote counterpart we can compare it with. 
- if remoteSHA256, s256Data, err = loadHash(client, file.SHA256URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA256", - "url", file.SHA256URL(), - "error", err) - } else { - slog.Info("SHA256 not present", "file", file.URL()) - } - } else { - s256 = sha256.New() - writers = append(writers, s256) + hashToFetch := []hashFetchInfo{} + if file.SHA512URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA512URL(), + warn: true, + hashType: algSha512, + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512))}) + } else { + slog.Info("SHA512 not present") + } + if file.SHA256URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA256URL(), + warn: true, + hashType: algSha256, + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256))}) + } else { + slog.Info("SHA256 not present") + } + if file.IsDirectory() { + for i := range hashToFetch { + hashToFetch[i].warn = false } } - if (d.cfg.PreferredHash != "sha256" || file.SHA256URL() == "") && file.SHA512URL() != "" { - if remoteSHA512, s512Data, err = loadHash(client, file.SHA512URL()); err != nil { - if !file.IsDirectory() { - slog.Warn("Cannot fetch SHA512", - "url", file.SHA512URL(), - "error", err) - } else { - slog.Info("SHA512 not present", "file", file.URL()) - } - } else { - s512 = sha512.New() - writers = append(writers, s512) - } + remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch) + if remoteSHA512 != nil { + s512 = sha512.New() + writers = append(writers, s512) + } + if remoteSHA256 != nil { + s256 = sha256.New() + writers = append(writers, s256) } // Remember the data as we need to store it to file later. @@ -755,6 +767,50 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, return sign, data, nil } +func loadHashes(client util.Client, hashes []hashFetchInfo) ([]byte, []byte, []byte, []byte) { + var remoteSha256, remoteSha512, sha256Data, sha512Data []byte + + // Load preferred hashes first + slices.SortStableFunc(hashes, func(a, b hashFetchInfo) int { + if a.preferred == b.preferred { + return 0 + } + if a.preferred && !b.preferred { + return -1 + } + return 1 + }) + for _, h := range hashes { + if remote, data, err := loadHash(client, h.url); err != nil { + if h.warn { + slog.Warn("Cannot fetch hash", + "hash", h.hashType, + "url", h.url, + "error", err) + } else { + slog.Info("Hash not present", "hash", h.hashType, "file", h.url) + } + } else { + switch h.hashType { + case algSha512: + { + remoteSha512 = remote + sha512Data = data + } + case algSha256: + { + remoteSha256 = remote + sha256Data = data + } + } + if h.preferred { + break + } + } + } + return remoteSha256, sha256Data, remoteSha512, sha512Data +} + func loadHash(client util.Client, p string) ([]byte, []byte, error) { resp, err := client.Get(p) if err != nil { From b2180849e99f2b1df9dbc97a6b2d3c6d93fcc679 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Mon, 25 Nov 2024 09:38:13 +0100 Subject: [PATCH 121/235] Update README.md that go paths can be adjusted --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cec9248..463b1d9 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ > [!IMPORTANT] > To avoid future breakage, if you still use `csaf-poc`: > 1. Adjust your HTML links. -> 2. Prepare to adjust your go module paths once https://github.com/gocsaf/csaf/issues/579 is solved. -> +> 2. 
Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). +> > (This repository was moved here on 2024-10-28. The old one is deprecated > and redirection will be switched off a few months later.) From a167bf65ad14acb142dba288529ee760799f338d Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 25 Nov 2024 14:27:56 +0100 Subject: [PATCH 122/235] Add Apache 2.0 license to root folder This allows other programs like google/licensecheck to correctly detect the license. This is required to display the documentation in `pkg.go.dev`. --- LICENSE-Apache-2.0.txt | 73 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 LICENSE-Apache-2.0.txt diff --git a/LICENSE-Apache-2.0.txt b/LICENSE-Apache-2.0.txt new file mode 100644 index 0000000..137069b --- /dev/null +++ b/LICENSE-Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. 
(Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. From ffb4eff933fef6c222dd131e90675152589c8003 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 27 Nov 2024 12:15:21 +0100 Subject: [PATCH 123/235] Merge unittest into sha-handling commit 990c74a1a64cf8688a7fd14ebb524ce96a320eef Merge: 86d7ce1 7824f3b Author: koplas Date: Fri Nov 22 16:58:46 2024 +0100 Merge branch 'sha-handling' into unittest commit 86d7ce13dcf1ff2250f27b5e9b811da38937fff5 Merge: a6807d2 79b8900 Author: koplas Date: Fri Nov 22 16:54:45 2024 +0100 Merge branch 'sha-handling' into unittest commit 79b89009dd7f5dd748ccedc0ea87ea26e75b65d2 Author: koplas Date: Fri Nov 22 16:31:56 2024 +0100 Improve hash fetching and logging commit a6807d24d604cafa4e5d30d6ba9c948490d9f883 Merge: ddb5518 d18d2c3 Author: koplas Date: Fri Nov 22 16:51:55 2024 +0100 Merge branch 'sha-handling' into unittest commit d18d2c3bf17950dad276457136c2262988cca129 Author: koplas Date: Fri Nov 22 16:31:56 2024 +0100 Improve hash fetching and logging commit ddb5518c6d57adce14fb5f7665d219778e642c53 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Tue Sep 17 10:45:25 2024 +0200 Extend SHA marking tests commit 13c94f4fa06c0ba3ca52f76b93934f6855d80e81 Author: koplas Date: Mon Sep 16 20:46:31 2024 +0200 Use temp directory for downloads commit 1819b4896babaf9bd1136e5846e07224fb201b18 Author: koplas Date: Mon Sep 16 20:37:55 2024 +0200 Fix rolie feed commit 989e3667bad4c10cb1a779d3a7efd526929dc002 Author: koplas Date: Mon Sep 16 20:23:22 2024 +0200 Fix provider-metadata.json commit 714735d74a159e1fd8f7e756673742708dc758d4 Author: koplas Date: Mon Sep 16 20:08:21 2024 +0200 Implement provider handler commit d488e3994749c3e7daf2c00f2a7952974a8dce49 Author: koplas Date: Mon Sep 16 16:26:37 2024 +0200 Add info about gpg key commit a9bf9da130a04fffbf00481930575d1b292d138f Author: koplas Date: Mon Sep 16 16:12:49 2024 +0200 Rename directory testdata commit 6ca6dfee25c947758fac0abfb28e10049809d3ec Author: koplas Date: Mon Sep 16 16:01:41 2024 +0200 Add initial downloader tests commit 20bee797c61a457c58b37c208f0540a5ed7d7468 Author: koplas Date: Mon Sep 16 15:58:31 2024 +0200 Fix: Remove unecessary error print commit 8e4e508073e6a8d34922295de35da42b4ea8a93a Author: koplas Date: Mon Sep 16 14:50:48 2024 +0200 Extend links test commit 3ba29f94de3eebc379adc021f40fd5cd0587b57d Author: koplas Date: Mon Sep 16 14:11:14 2024 +0200 Add initial directory feed testdata commit dee55aafd9052adcda28a231b04271d866d06dd7 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Mon Sep 16 10:47:32 2024 +0200 Add initial testdata commit cd9338ae7279791db62e28e8f4b5cfe9cf370881 Author: koplas <54645365+koplas@users.noreply.github.com> Date: Thu Sep 12 
15:54:42 2024 +0200 Add initial download unittests --- cmd/csaf_aggregator/client_test.go | 67 ++++++ cmd/csaf_checker/links_test.go | 80 ++++++- cmd/csaf_downloader/config.go | 4 +- cmd/csaf_downloader/downloader.go | 6 + cmd/csaf_downloader/downloader_test.go | 218 ++++++++++++++++++ csaf/providermetaloader.go | 2 +- .../openpgp/info.txt | 2 + .../openpgp/privkey.asc | 15 ++ .../openpgp/pubkey.asc | 13 ++ .../provider-metadata.json | 25 ++ .../simple-directory-provider/security.txt | 2 + .../avendor-advisory-0004-not-listed.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json.asc | 7 + .../white/avendor-advisory-0004.json.sha256 | 1 + .../white/avendor-advisory-0004.json.sha512 | 1 + .../white/changes.csv | 1 + .../white/index.html | 6 + .../simple-directory-provider/white/index.txt | 1 + .../simple-rolie-provider/openpgp/info.txt | 2 + .../simple-rolie-provider/openpgp/privkey.asc | 15 ++ .../simple-rolie-provider/openpgp/pubkey.asc | 13 ++ .../provider-metadata.json | 33 +++ testdata/simple-rolie-provider/security.txt | 2 + testdata/simple-rolie-provider/service.json | 23 ++ .../white/avendor-advisory-0004.json | 170 ++++++++++++++ .../white/avendor-advisory-0004.json.asc | 7 + .../white/avendor-advisory-0004.json.sha256 | 1 + .../white/avendor-advisory-0004.json.sha512 | 1 + .../white/white-feed.json | 61 +++++ 30 files changed, 1115 insertions(+), 4 deletions(-) create mode 100644 cmd/csaf_aggregator/client_test.go create mode 100644 cmd/csaf_downloader/downloader_test.go create mode 100644 testdata/simple-directory-provider/openpgp/info.txt create mode 100644 testdata/simple-directory-provider/openpgp/privkey.asc create mode 100644 testdata/simple-directory-provider/openpgp/pubkey.asc create mode 100644 testdata/simple-directory-provider/provider-metadata.json create mode 100644 testdata/simple-directory-provider/security.txt create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 create mode 100644 testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 create mode 100644 testdata/simple-directory-provider/white/changes.csv create mode 100644 testdata/simple-directory-provider/white/index.html create mode 100644 testdata/simple-directory-provider/white/index.txt create mode 100644 testdata/simple-rolie-provider/openpgp/info.txt create mode 100644 testdata/simple-rolie-provider/openpgp/privkey.asc create mode 100644 testdata/simple-rolie-provider/openpgp/pubkey.asc create mode 100644 testdata/simple-rolie-provider/provider-metadata.json create mode 100644 testdata/simple-rolie-provider/security.txt create mode 100644 testdata/simple-rolie-provider/service.json create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 create mode 100644 testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 create mode 100644 testdata/simple-rolie-provider/white/white-feed.json diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go new file mode 100644 index 
0000000..c08b29a --- /dev/null +++ b/cmd/csaf_aggregator/client_test.go @@ -0,0 +1,67 @@ +// This file is Free Software under the MIT License +// without warranty, see README.md and LICENSES/MIT.txt for details. +// +// SPDX-License-Identifier: MIT +// +// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) +// Software-Engineering: 2022 Intevation GmbH + +package main + +import ( + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/csaf-poc/csaf_distribution/v3/util" +) + +func Test_downloadJSON(t *testing.T) { + tests := []struct { + name string + statusCode int + contentType string + wantErr error + }{ + { + name: "status ok, application/json", + statusCode: http.StatusOK, + contentType: "application/json", + wantErr: nil, + }, + { + name: "status found, application/json", + statusCode: http.StatusFound, + contentType: "application/json", + wantErr: errNotFound, + }, + { + name: "status ok, application/xml", + statusCode: http.StatusOK, + contentType: "application/xml", + wantErr: errNotFound, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + found := func(r io.Reader) error { + return nil + } + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Add("Content-Type", test.contentType) + w.WriteHeader(test.statusCode) + })) + defer server.Close() + hClient := http.Client{} + client := util.Client(&hClient) + if gotErr := downloadJSON(client, server.URL, found); gotErr != test.wantErr { + t.Errorf("downloadJSON: Expected %q but got %q.", test.wantErr, gotErr) + } + }) + } +} diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index 8abf4e6..aa04222 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -10,8 +10,12 @@ package main import ( "fmt" + "net/http" + "net/http/httptest" "strings" "testing" + + "github.com/csaf-poc/csaf_distribution/v3/util" ) const page0 = ` @@ -31,7 +35,6 @@ const page0 = ` ` func TestLinksOnPage(t *testing.T) { - var links []string err := linksOnPage( @@ -58,3 +61,78 @@ func TestLinksOnPage(t *testing.T) { } } } + +func Test_listed(t *testing.T) { + tests := []struct { + name string + badDirs util.Set[string] + path string + want bool + }{ + { + name: "listed path", + badDirs: util.Set[string]{}, + path: "/white/avendor-advisory-0004.json", + want: true, + }, + { + name: "badDirs contains path", + badDirs: util.Set[string]{"/white/": {}}, + path: "/white/avendor-advisory-0004.json", + want: false, + }, + { + name: "not found", + badDirs: util.Set[string]{}, + path: "/not-found/resource.json", + want: false, + }, + { + name: "badDirs does not contain path", + badDirs: util.Set[string]{"/bad-dir/": {}}, + path: "/white/avendor-advisory-0004.json", + want: true, + }, + { + name: "unlisted path", + badDirs: util.Set[string]{}, + path: "/white/avendor-advisory-0004-not-listed.json", + want: false, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + fs := http.FileServer(http.Dir("../../testdata/simple-directory-provider")) + server := httptest.NewTLSServer(fs) + defer server.Close() + + serverURL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + pgs := pages{} + cfg := config{RemoteValidator: "", RemoteValidatorCache: ""} + p, err := newProcessor(&cfg) + if err != nil { + t.Error(err) + } 
+ p.client = client + + badDirs := util.Set[string]{} + for dir := range test.badDirs { + badDirs.Add(serverURL + dir) + } + + got, _ := pgs.listed(serverURL+test.path, p, badDirs) + if got != test.want { + t.Errorf("%q: Expected %t but got %t.", test.name, test.want, got) + } + }) + } +} diff --git a/cmd/csaf_downloader/config.go b/cmd/csaf_downloader/config.go index a262ef7..a44fa81 100644 --- a/cmd/csaf_downloader/config.go +++ b/cmd/csaf_downloader/config.go @@ -44,8 +44,8 @@ const ( type hashAlgorithm string const ( - algSha256 = hashAlgorithm("SHA256") - algSha512 = hashAlgorithm("SHA512") + algSha256 = hashAlgorithm("sha256") + algSha512 = hashAlgorithm("sha512") ) type config struct { diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 18fc1e8..ca5cccc 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -47,6 +47,7 @@ type hashFetchInfo struct { type downloader struct { cfg *config + client *util.Client // Used for testing keys *crypto.KeyRing validator csaf.RemoteValidator forwarder *forwarder @@ -131,6 +132,11 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) + // Overwrite for testing purposes + if d.client != nil { + client = *d.client + } + // Add extra headers. if len(d.cfg.ExtraHeader) > 0 { client = &util.HeaderClient{ diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go new file mode 100644 index 0000000..cf02035 --- /dev/null +++ b/cmd/csaf_downloader/downloader_test.go @@ -0,0 +1,218 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package main + +import ( + "context" + "errors" + "html/template" + "log/slog" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + + "github.com/csaf-poc/csaf_distribution/v3/internal/options" + "github.com/csaf-poc/csaf_distribution/v3/util" +) + +type ProviderParams struct { + URL string + EnableSha256 bool + EnableSha512 bool +} + +func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := "../../testdata/" + if directoryProvider { + path += "simple-directory-provider" + } else { + path += "simple-rolie-provider" + } + + path += r.URL.Path + + if strings.HasSuffix(r.URL.Path, "/") { + path += "index.html" + } + + content, err := os.ReadFile(path) + if err != nil { + w.WriteHeader(http.StatusNotFound) + return + } + switch { + case strings.HasSuffix(path, ".html"): + w.Header().Add("Content-Type", "text/html") + case strings.HasSuffix(path, ".json"): + w.Header().Add("Content-Type", "application/json") + case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: + w.WriteHeader(http.StatusNotFound) + return + case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: + w.WriteHeader(http.StatusNotFound) + return + default: + w.Header().Add("Content-Type", "text/plain") + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + err = tmplt.Execute(w, params) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + }) +} + +func 
checkIfFileExists(path string, t *testing.T) bool { + if _, err := os.Stat(path); err == nil { + return true + } else if errors.Is(err, os.ErrNotExist) { + return false + } else { + t.Fatalf("Failed to check if file exists: %v", err) + return false + } +} + +func TestShaMarking(t *testing.T) { + tests := []struct { + name string + directoryProvider bool + wantSha256 bool + wantSha512 bool + enableSha256 bool + enableSha512 bool + preferredHash hashAlgorithm + }{ + { + name: "want sha256 and sha512", + directoryProvider: false, + wantSha256: true, + wantSha512: true, + enableSha256: true, + enableSha512: true, + }, + { + name: "only want sha256", + directoryProvider: false, + wantSha256: true, + wantSha512: false, + enableSha256: true, + enableSha512: true, + preferredHash: algSha256, + }, + { + name: "only want sha512", + directoryProvider: false, + wantSha256: false, + wantSha512: true, + enableSha256: true, + enableSha512: true, + preferredHash: algSha512, + }, + { + name: "only want sha512", + directoryProvider: false, + wantSha256: false, + wantSha512: true, + enableSha256: true, + enableSha512: true, + preferredHash: algSha512, + }, + + { + name: "only deliver sha256", + directoryProvider: false, + wantSha256: true, + wantSha512: false, + enableSha256: true, + enableSha512: false, + preferredHash: algSha512, + }, + { + name: "only want sha256, directory provider", + directoryProvider: true, + wantSha256: true, + wantSha512: false, + enableSha256: true, + enableSha512: true, + preferredHash: algSha256, + }, + { + name: "only want sha512, directory provider", + directoryProvider: true, + wantSha256: false, + wantSha512: true, + enableSha256: true, + enableSha512: true, + preferredHash: algSha512, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + params := ProviderParams{ + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + } + server := httptest.NewTLSServer(ProviderHandler(&params, test.directoryProvider)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + tempDir := t.TempDir() + cfg := config{LogLevel: &options.LogLevel{Level: slog.LevelDebug}, Directory: tempDir, PreferredHash: test.preferredHash} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + d, err := newDownloader(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + d.client = &client + + ctx := context.Background() + err = d.run(ctx, []string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) + } + d.close() + + // Check for downloaded hashes + sha256Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha256", t) + sha512Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha512", t) + + if sha256Exists != test.wantSha256 { + t.Errorf("%v: expected sha256 hash present to be %v, got: %v", test.name, test.wantSha256, sha256Exists) + } + + if sha512Exists != test.wantSha512 { + t.Errorf("%v: expected sha512 hash present to be %v, got: %v", test.name, test.wantSha512, sha512Exists) + } + }) + } +} diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index b21ddc6..aa3c38a 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -352,7 +352,7 @@ func (pmdl 
*ProviderMetadataLoader) loadFromURL(path string) *LoadedProviderMeta case len(errors) > 0: result.Messages = []ProviderMetadataLoadMessage{{ Type: SchemaValidationFailed, - Message: fmt.Sprintf("%s: Validating against JSON schema failed: %v", path, err), + Message: fmt.Sprintf("%s: Validating against JSON schema failed", path), }} for _, msg := range errors { result.Messages.Add( diff --git a/testdata/simple-directory-provider/openpgp/info.txt b/testdata/simple-directory-provider/openpgp/info.txt new file mode 100644 index 0000000..3a159f6 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/info.txt @@ -0,0 +1,2 @@ +The GPG key was generated with no passphrase and this command: +`gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --quick-generate-key security@example.com"` diff --git a/testdata/simple-directory-provider/openpgp/privkey.asc b/testdata/simple-directory-provider/openpgp/privkey.asc new file mode 100644 index 0000000..816f309 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/privkey.asc @@ -0,0 +1,15 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- + +lFgEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr4AAQD5G5xy/yTN5b+lvV5Ahrbz1qOZ/wmKTieGOH9GZb6JwhHwtBRzZWN1 +cml0eUBleGFtcGxlLmNvbYiZBBMWCgBBFiEEqJFMovEROcammgAY+zzZsV3mFZYF +AmbbEDcCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ+zzZ +sV3mFZZskQEAg5Dttqm6TA7MtLxz7VSlklx95LQr9d5jm4jcOaqlGT0A/1mAAlUq +SDySFGI6DFQLcaZaUd9Yl+1b0Icr0tUiOaQHnF0EZtsQNxIKKwYBBAGXVQEFAQEH +QOTHP4FkopIGJMWXTYsaeQ1Dugd+yNYWB357vRYq6QsiAwEIBwAA/0RIazq1s8Oe +23jvNaZGb/adDYnRrkCMXXTBKsuA6WOAEhKIeAQYFgoAIBYhBKiRTKLxETnGppoA +GPs82bFd5hWWBQJm2xA3AhsMAAoJEPs82bFd5hWWDKABAOl+NoM6FBhKAvckUXDR +MLZ4k778N4Vy9VHbectjRKj1AQCO3JOmON+U6/mjohXrc2bwzKzt2yGiLP2HMxDx +uzMXBQ== +=4XHC +-----END PGP PRIVATE KEY BLOCK----- diff --git a/testdata/simple-directory-provider/openpgp/pubkey.asc b/testdata/simple-directory-provider/openpgp/pubkey.asc new file mode 100644 index 0000000..88cb720 --- /dev/null +++ b/testdata/simple-directory-provider/openpgp/pubkey.asc @@ -0,0 +1,13 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mDMEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr60FHNlY3VyaXR5QGV4YW1wbGUuY29tiJkEExYKAEEWIQSokUyi8RE5xqaa +ABj7PNmxXeYVlgUCZtsQNwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIe +BwIXgAAKCRD7PNmxXeYVlmyRAQCDkO22qbpMDsy0vHPtVKWSXH3ktCv13mObiNw5 +qqUZPQD/WYACVSpIPJIUYjoMVAtxplpR31iX7VvQhyvS1SI5pAe4OARm2xA3Egor +BgEEAZdVAQUBAQdA5Mc/gWSikgYkxZdNixp5DUO6B37I1hYHfnu9FirpCyIDAQgH +iHgEGBYKACAWIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZtsQNwIbDAAKCRD7PNmx +XeYVlgygAQDpfjaDOhQYSgL3JFFw0TC2eJO+/DeFcvVR23nLY0So9QEAjtyTpjjf +lOv5o6IV63Nm8Mys7dshoiz9hzMQ8bszFwU= +=rhGT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/testdata/simple-directory-provider/provider-metadata.json b/testdata/simple-directory-provider/provider-metadata.json new file mode 100644 index 0000000..792afd3 --- /dev/null +++ b/testdata/simple-directory-provider/provider-metadata.json @@ -0,0 +1,25 @@ +{ + "canonical_url": "{{.URL}}/provider-metadata.json", + "distributions": [ + { + "directory_url": "{{.URL}}/white/" + } + ], + "last_updated": "2020-01-01T00:00:00Z", + "list_on_CSAF_aggregators": true, + "metadata_version": "2.0", + "mirror_on_CSAF_aggregators": true, + "public_openpgp_keys": [ + { + "fingerprint": "A8914CA2F11139C6A69A0018FB3CD9B15DE61596", + "url": "{{.URL}}/openpgp/pubkey.asc" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc", + "namespace": "https://example.com", + "contact_details": 
"mailto:security@example.com" + }, + "role": "csaf_trusted_provider" +} diff --git a/testdata/simple-directory-provider/security.txt b/testdata/simple-directory-provider/security.txt new file mode 100644 index 0000000..0ae943d --- /dev/null +++ b/testdata/simple-directory-provider/security.txt @@ -0,0 +1,2 @@ +CSAF: /provider-metadata.json + diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json b/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004-not-listed.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": 
"Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json b/testdata/simple-directory-provider/white/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc 
b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc new file mode 100644 index 0000000..9dff47b --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.asc @@ -0,0 +1,7 @@ +-----BEGIN PGP SIGNATURE----- + +iHUEABYKAB0WIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZukv9QAKCRD7PNmxXeYV +ljq0AP9n/rTgoNCJzSTZzNrrMy28ZR+Ppp1MSPWGFUzsx6qLJgD/d8cu0lokMsXf +y0uc9k7hrla/ajFUzNt3AVvT+CPFtAo= +=7E66 +-----END PGP SIGNATURE----- diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 new file mode 100644 index 0000000..851b27c --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha256 @@ -0,0 +1 @@ +cb263bf1beab18b893de63f2966d0d8c5f38d60101c24d3fd7a5feebaad02c3b avendor-advisory-0004.json diff --git a/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 new file mode 100644 index 0000000..6703550 --- /dev/null +++ b/testdata/simple-directory-provider/white/avendor-advisory-0004.json.sha512 @@ -0,0 +1 @@ +39476e1d08a0871d166091c90de259544382a3599eebda118a93468499a30fd034286086c461a97d3d5298e093b0be3868e8d89d8a6a255c4aa6adb81ebbfcad avendor-advisory-0004.json diff --git a/testdata/simple-directory-provider/white/changes.csv b/testdata/simple-directory-provider/white/changes.csv new file mode 100644 index 0000000..4acdb29 --- /dev/null +++ b/testdata/simple-directory-provider/white/changes.csv @@ -0,0 +1 @@ +"avendor-advisory-0004.json","2020-01-01T00:00:00+00:00" diff --git a/testdata/simple-directory-provider/white/index.html b/testdata/simple-directory-provider/white/index.html new file mode 100644 index 0000000..bcfabd9 --- /dev/null +++ b/testdata/simple-directory-provider/white/index.html @@ -0,0 +1,6 @@ + + + + avendor-advisory-0004 + + diff --git a/testdata/simple-directory-provider/white/index.txt b/testdata/simple-directory-provider/white/index.txt new file mode 100644 index 0000000..d19d30f --- /dev/null +++ b/testdata/simple-directory-provider/white/index.txt @@ -0,0 +1 @@ +avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/openpgp/info.txt b/testdata/simple-rolie-provider/openpgp/info.txt new file mode 100644 index 0000000..3a159f6 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/info.txt @@ -0,0 +1,2 @@ +The GPG key was generated with no passphrase and this command: +`gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --quick-generate-key security@example.com"` diff --git a/testdata/simple-rolie-provider/openpgp/privkey.asc b/testdata/simple-rolie-provider/openpgp/privkey.asc new file mode 100644 index 0000000..816f309 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/privkey.asc @@ -0,0 +1,15 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- + +lFgEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr4AAQD5G5xy/yTN5b+lvV5Ahrbz1qOZ/wmKTieGOH9GZb6JwhHwtBRzZWN1 +cml0eUBleGFtcGxlLmNvbYiZBBMWCgBBFiEEqJFMovEROcammgAY+zzZsV3mFZYF +AmbbEDcCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ+zzZ +sV3mFZZskQEAg5Dttqm6TA7MtLxz7VSlklx95LQr9d5jm4jcOaqlGT0A/1mAAlUq +SDySFGI6DFQLcaZaUd9Yl+1b0Icr0tUiOaQHnF0EZtsQNxIKKwYBBAGXVQEFAQEH +QOTHP4FkopIGJMWXTYsaeQ1Dugd+yNYWB357vRYq6QsiAwEIBwAA/0RIazq1s8Oe +23jvNaZGb/adDYnRrkCMXXTBKsuA6WOAEhKIeAQYFgoAIBYhBKiRTKLxETnGppoA +GPs82bFd5hWWBQJm2xA3AhsMAAoJEPs82bFd5hWWDKABAOl+NoM6FBhKAvckUXDR 
+MLZ4k778N4Vy9VHbectjRKj1AQCO3JOmON+U6/mjohXrc2bwzKzt2yGiLP2HMxDx +uzMXBQ== +=4XHC +-----END PGP PRIVATE KEY BLOCK----- diff --git a/testdata/simple-rolie-provider/openpgp/pubkey.asc b/testdata/simple-rolie-provider/openpgp/pubkey.asc new file mode 100644 index 0000000..88cb720 --- /dev/null +++ b/testdata/simple-rolie-provider/openpgp/pubkey.asc @@ -0,0 +1,13 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mDMEZtsQNxYJKwYBBAHaRw8BAQdASr3y4zW+4XGqUlvRJ7stRCUHv8HB4ZoMoTtU +KLgnHr60FHNlY3VyaXR5QGV4YW1wbGUuY29tiJkEExYKAEEWIQSokUyi8RE5xqaa +ABj7PNmxXeYVlgUCZtsQNwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIe +BwIXgAAKCRD7PNmxXeYVlmyRAQCDkO22qbpMDsy0vHPtVKWSXH3ktCv13mObiNw5 +qqUZPQD/WYACVSpIPJIUYjoMVAtxplpR31iX7VvQhyvS1SI5pAe4OARm2xA3Egor +BgEEAZdVAQUBAQdA5Mc/gWSikgYkxZdNixp5DUO6B37I1hYHfnu9FirpCyIDAQgH +iHgEGBYKACAWIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZtsQNwIbDAAKCRD7PNmx +XeYVlgygAQDpfjaDOhQYSgL3JFFw0TC2eJO+/DeFcvVR23nLY0So9QEAjtyTpjjf +lOv5o6IV63Nm8Mys7dshoiz9hzMQ8bszFwU= +=rhGT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/testdata/simple-rolie-provider/provider-metadata.json b/testdata/simple-rolie-provider/provider-metadata.json new file mode 100644 index 0000000..7abb316 --- /dev/null +++ b/testdata/simple-rolie-provider/provider-metadata.json @@ -0,0 +1,33 @@ +{ + "canonical_url": "{{.URL}}/provider-metadata.json", + "distributions": [ + { + "rolie": { + "feeds": [ + { + "summary": "TLP:WHITE advisories", + "tlp_label": "WHITE", + "url": "{{.URL}}/white/white-feed.json" + } + ] + } + } + ], + "last_updated": "2020-01-01T00:00:00Z", + "list_on_CSAF_aggregators": true, + "metadata_version": "2.0", + "mirror_on_CSAF_aggregators": true, + "public_openpgp_keys": [ + { + "fingerprint": "A8914CA2F11139C6A69A0018FB3CD9B15DE61596", + "url": "{{.URL}}/openpgp/pubkey.asc" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc", + "namespace": "https://example.com", + "contact_details": "mailto:security@example.com" + }, + "role": "csaf_trusted_provider" +} diff --git a/testdata/simple-rolie-provider/security.txt b/testdata/simple-rolie-provider/security.txt new file mode 100644 index 0000000..0ae943d --- /dev/null +++ b/testdata/simple-rolie-provider/security.txt @@ -0,0 +1,2 @@ +CSAF: /provider-metadata.json + diff --git a/testdata/simple-rolie-provider/service.json b/testdata/simple-rolie-provider/service.json new file mode 100644 index 0000000..500d882 --- /dev/null +++ b/testdata/simple-rolie-provider/service.json @@ -0,0 +1,23 @@ +{ + "service": { + "workspace": [ + { + "title": "CSAF feeds", + "collection": [ + { + "title": "CSAF feed (TLP:WHITE)", + "href": "/white/white-feed.json", + "categories": { + "category": [ + { + "scheme": "urn:ietf:params:rolie:category:information-type", + "term": "csaf" + } + ] + } + } + ] + } + ] + } +} diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + 
"current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc new file mode 100644 index 0000000..9dff47b --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.asc @@ -0,0 +1,7 @@ +-----BEGIN PGP SIGNATURE----- + +iHUEABYKAB0WIQSokUyi8RE5xqaaABj7PNmxXeYVlgUCZukv9QAKCRD7PNmxXeYV +ljq0AP9n/rTgoNCJzSTZzNrrMy28ZR+Ppp1MSPWGFUzsx6qLJgD/d8cu0lokMsXf +y0uc9k7hrla/ajFUzNt3AVvT+CPFtAo= +=7E66 +-----END PGP SIGNATURE----- diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 new file mode 100644 index 0000000..851b27c --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha256 @@ -0,0 +1 @@ +cb263bf1beab18b893de63f2966d0d8c5f38d60101c24d3fd7a5feebaad02c3b avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 
b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 new file mode 100644 index 0000000..6703550 --- /dev/null +++ b/testdata/simple-rolie-provider/white/avendor-advisory-0004.json.sha512 @@ -0,0 +1 @@ +39476e1d08a0871d166091c90de259544382a3599eebda118a93468499a30fd034286086c461a97d3d5298e093b0be3868e8d89d8a6a255c4aa6adb81ebbfcad avendor-advisory-0004.json diff --git a/testdata/simple-rolie-provider/white/white-feed.json b/testdata/simple-rolie-provider/white/white-feed.json new file mode 100644 index 0000000..1bc17bc --- /dev/null +++ b/testdata/simple-rolie-provider/white/white-feed.json @@ -0,0 +1,61 @@ +{ + "feed": { + "id": "csaf-feed-tlp-white", + "title": "CSAF feed (TLP:WHITE)", + "link": [ + { + "rel": "self", + "href": "/white/csaf-feed-tlp-white.json" + }, + { + "rel": "service", + "href": "/service.json" + } + ], + "category": [ + { + "scheme": "urn:ietf:params:rolie:category:information-type", + "term": "csaf" + } + ], + "updated": "2020-01-01T00:00:00Z", + "entry": [ + { + "id": "Avendor-advisory-0004", + "title": "Test CSAF document", + "link": [ + { + "rel": "self", + "href": "/white/avendor-advisory-0004.json" + }, + {{if .EnableSha256}} + { + "rel": "hash", + "href": "/white/avendor-advisory-0004.json.sha256" + }, + {{end}} + {{if .EnableSha512}} + { + "rel": "hash", + "href": "/white/avendor-advisory-0004.json.sha512" + }, + {{end}} + { + "rel": "signature", + "href": "/white/avendor-advisory-0004.json.asc" + } + ], + "published": "2020-01-01T00:00:00Z", + "updated": "2020-01-01T00:00:00Z", + "content": { + "type": "application/json", + "src": "/avendor-advisory-0004.json" + }, + "format": { + "schema": "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", + "version": "2.0" + } + } + ] + } +} From 56509bbb4d868454d01e3b7ce9dffd8bdb658e58 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 27 Nov 2024 12:51:38 +0100 Subject: [PATCH 124/235] Use new path in tests --- cmd/csaf_aggregator/client_test.go | 2 +- cmd/csaf_checker/links_test.go | 2 +- cmd/csaf_downloader/downloader_test.go | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go index c08b29a..fc5b095 100644 --- a/cmd/csaf_aggregator/client_test.go +++ b/cmd/csaf_aggregator/client_test.go @@ -14,7 +14,7 @@ import ( "net/http/httptest" "testing" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) func Test_downloadJSON(t *testing.T) { diff --git a/cmd/csaf_checker/links_test.go b/cmd/csaf_checker/links_test.go index aa04222..6baccf8 100644 --- a/cmd/csaf_checker/links_test.go +++ b/cmd/csaf_checker/links_test.go @@ -15,7 +15,7 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/util" ) const page0 = ` diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index cf02035..1ae1524 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -19,8 +19,8 @@ import ( "strings" "testing" - "github.com/csaf-poc/csaf_distribution/v3/internal/options" - "github.com/csaf-poc/csaf_distribution/v3/util" + "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/util" ) type ProviderParams struct { From b8a98033bf3721bdec6a055dfb07873e2306e512 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 28 Nov 2024 15:58:20 +0100 Subject: [PATCH 125/235] fix docs link to standard --- docs/proxy-provider-for-aggregator.md | 4 +++- 
1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/proxy-provider-for-aggregator.md b/docs/proxy-provider-for-aggregator.md index f34d714..4148f52 100644 --- a/docs/proxy-provider-for-aggregator.md +++ b/docs/proxy-provider-for-aggregator.md @@ -5,7 +5,9 @@ calls it a *CSAF publisher*. After manually downloading the advisories from such a publisher, the tools here can be used to offer the CSAF files for automated downloading -as *CSAF aggregator*. (The construct is called *CSAF proxy provider*. See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#725-role-csaf-aggregator) for more details.) +as *CSAF aggregator*. (The construct is called *CSAF proxy provider*. +See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html#725-role-csaf-aggregator) +for more details.) There are three necessary steps, easiest is to use one single virtual maschine (or container) per internal provider. From 1daaed2c516d3fd674eb99c39dfc5f87ba43f78a Mon Sep 17 00:00:00 2001 From: ncsc-ie-devs <112564016+ncsc-ie-devs@users.noreply.github.com> Date: Mon, 2 Dec 2024 10:42:54 +0000 Subject: [PATCH 126/235] ensure HTTP requests use proxy env vars (#597) * fix: ensure HTTP requests use proxy env vars Updated all instances of `http.Transport` to include the `Proxy` field set to `http.ProxyFromEnvironment`. This ensures that the application respects proxy configuration defined by the `HTTP_PROXY`, `HTTPS_PROXY`, and `NO_PROXY` environment variables. ### Changes: - Modified `http.Transport` initialization across the codebase to use: ```go Proxy: http.ProxyFromEnvironment ``` - Ensured TLS configurations remain intact by preserving `TLSClientConfig`. ### Why: - Previously, HTTP requests bypassed proxy settings due to missing configuration in the transport layer. - This fix enables compatibility with proxied environments, aligning with standard Go behavior. ### Impact: - All HTTP and HTTPS traffic now adheres to proxy settings. - Domains listed in `NO_PROXY` bypass the proxy as expected. ### Verification: - Tested with proxy environment variables set (`HTTP_PROXY`, `HTTPS_PROXY`). - Verified requests route through the proxy and `NO_PROXY` works as intended. 
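In essence, every client built by these tools ends up with a transport like the following minimal sketch (a plain client without the extra-header or logging wrappers used elsewhere in the code base; the helper name and the URL are illustrative only):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
)

// newProxiedClient returns an HTTP client whose transport keeps an explicit
// TLS configuration and honors HTTP_PROXY, HTTPS_PROXY and NO_PROXY through
// http.ProxyFromEnvironment. The helper name is illustrative only.
func newProxiedClient(insecure bool) *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: insecure},
			Proxy:           http.ProxyFromEnvironment,
		},
	}
}

func main() {
	client := newProxiedClient(false)
	// Example URL only; any HTTPS endpoint reachable through the configured proxy will do.
	resp, err := client.Get("https://example.com/.well-known/csaf/provider-metadata.json")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```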
* reformat with fmt --------- Co-authored-by: Cormac Doherty --- cmd/csaf_aggregator/config.go | 1 + cmd/csaf_checker/processor.go | 2 ++ cmd/csaf_downloader/downloader.go | 1 + cmd/csaf_downloader/forwarder.go | 1 + cmd/csaf_uploader/processor.go | 1 + 5 files changed, 6 insertions(+) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 81db0b7..3c2c46b 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -284,6 +284,7 @@ func (c *config) httpClient(p *provider) util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 5fd3fbd..5d1b69b 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -430,6 +430,7 @@ func (p *processor) fullClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) @@ -460,6 +461,7 @@ func (p *processor) basicClient() *http.Client { if p.cfg.Insecure { tr := &http.Transport{ TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + Proxy: http.ProxyFromEnvironment, } return &http.Client{Transport: tr} } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index f21fcc0..b7e7342 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -121,6 +121,7 @@ func (d *downloader) httpClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 12d9fe4..1598283 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -106,6 +106,7 @@ func (f *forwarder) httpClient() util.Client { hClient.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } client := util.Client(&hClient) diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index b57cafb..f655e02 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -51,6 +51,7 @@ func (p *processor) httpClient() *http.Client { client.Transport = &http.Transport{ TLSClientConfig: &tlsConfig, + Proxy: http.ProxyFromEnvironment, } return &client From 57953e495f10c26312a05eec3d1e7acb2a40e363 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 13:23:57 +0100 Subject: [PATCH 127/235] Warn if no remote validator was specified --- cmd/csaf_validator/main.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index b07c2f4..6985509 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -69,6 +69,8 @@ func run(opts *options, files []string) error { "preparing remote validator failed: %w", err) } defer validator.Close() + } else { + log.Printf("warn: no remote validator specified") } // Select amount level of output for remote validation. 
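The two patches that follow turn csaf_validator's outcome into a small bit mask carried in the process exit code. A rough sketch of how a caller might decode that code (the flag values mirror the constants introduced below; resolving the binary via PATH and the advisory file name are assumptions of this example):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

// Flag values mirroring the exit codes introduced in the next patches:
// 1 = schema invalid, 2 = no remote validator, 4 = remote validation failed.
const (
	schemaInvalid = 1 << iota
	noRemoteValidator
	failedRemoteValidation
)

func main() {
	// Illustrative invocation; the advisory file name is a placeholder.
	cmd := exec.Command("csaf_validator", "advisory.json")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr

	code := 0
	if err := cmd.Run(); err != nil {
		var exitErr *exec.ExitError
		if !errors.As(err, &exitErr) {
			fmt.Println("could not run csaf_validator:", err)
			return
		}
		code = exitErr.ExitCode()
	}

	fmt.Println("schema validation failed: ", code&schemaInvalid != 0)
	fmt.Println("no remote validator:      ", code&noRemoteValidator != 0)
	fmt.Println("remote validation failed: ", code&failedRemoteValidation != 0)
}
```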
From 938ceb872ac4b5460379c86b89b6ca0db6ed72f2 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 13:53:56 +0100 Subject: [PATCH 128/235] Return exit code based on validation result --- cmd/csaf_validator/main.go | 13 +++++++++++++ docs/csaf_validator.md | 7 +++++++ 2 files changed, 20 insertions(+) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 6985509..4a9e827 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -22,6 +22,13 @@ import ( "github.com/gocsaf/csaf/v3/util" ) +const ( + exitCodeAllValid = 0 + exitCodeSchemaInvalid = 1 << 0 + exitCodeNoRemoteValidator = 1 << 1 + exitCodeFailedRemoteValidation = 1 << 2 +) + type options struct { Version bool `long:"version" description:"Display version of the binary"` RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL"` @@ -53,6 +60,7 @@ func main() { // run validates the given files. func run(opts *options, files []string) error { + exitCode := exitCodeAllValid var validator csaf.RemoteValidator eval := util.NewPathEval() @@ -70,6 +78,7 @@ func run(opts *options, files []string) error { } defer validator.Close() } else { + exitCode |= exitCodeNoRemoteValidator log.Printf("warn: no remote validator specified") } @@ -106,6 +115,7 @@ func run(opts *options, files []string) error { } if len(validationErrs) > 0 { + exitCode |= exitCodeSchemaInvalid fmt.Printf("schema validation errors of %q\n", file) for _, vErr := range validationErrs { fmt.Printf(" * %s\n", vErr) @@ -132,12 +142,15 @@ func run(opts *options, files []string) error { if rvr.Valid { passes = "passes" } else { + exitCode |= exitCodeFailedRemoteValidation passes = "does not pass" } fmt.Printf("%q %s remote validation.\n", file, passes) } } + // Exit code is based on validation results + os.Exit(exitCode) return nil } diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index dfa0c9a..74dbaaf 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -2,6 +2,13 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. +### Exit codes +If no fatal error occurs the program will exit with the following codes: +- `0`: all valid +- `2⁰`: schema invalid +- `2¹`: no remote validator configured +- `2²`: failure in remote validation + ### Usage ``` From 16e86051c5d1b0912a179eb2b30ba568da4e81ce Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 4 Dec 2024 14:27:24 +0100 Subject: [PATCH 129/235] Be more precise about exit codes. --- cmd/csaf_validator/main.go | 8 ++++---- docs/csaf_validator.md | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 4a9e827..9e844b7 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -23,10 +23,10 @@ import ( ) const ( - exitCodeAllValid = 0 - exitCodeSchemaInvalid = 1 << 0 - exitCodeNoRemoteValidator = 1 << 1 - exitCodeFailedRemoteValidation = 1 << 2 + exitCodeSchemaInvalid = 1 << iota + exitCodeNoRemoteValidator + exitCodeFailedRemoteValidation + exitCodeAllValid = 0 ) type options struct { diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 74dbaaf..64ded6d 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -3,11 +3,11 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. 
### Exit codes -If no fatal error occurs the program will exit with the following codes: -- `0`: all valid -- `2⁰`: schema invalid -- `2¹`: no remote validator configured -- `2²`: failure in remote validation +If no fatal error occurs the program will exit with an exit code `n` with the following conditions: +- `n == 0`: all valid +- `(n / 1) % 2 == 1`: schema validation failed +- `(n / 2) % 2 == 1`: no remote validator configured +- `(n / 4) % 2 == 1`: failure in remote validation ### Usage From a51964be3f6a9360ed0c4e05ccc5bcc8418d0f7e Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 16:02:03 +0100 Subject: [PATCH 130/235] Add initial csaf_checker provider test --- cmd/csaf_checker/processor_test.go | 103 +++++++++++++++++++++++++ cmd/csaf_downloader/downloader_test.go | 62 +--------------- internal/testutil/testutil.go | 73 ++++++++++++++++++ 3 files changed, 179 insertions(+), 59 deletions(-) create mode 100644 cmd/csaf_checker/processor_test.go create mode 100644 internal/testutil/testutil.go diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go new file mode 100644 index 0000000..b8b1b1f --- /dev/null +++ b/cmd/csaf_checker/processor_test.go @@ -0,0 +1,103 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package main + +import ( + "net/http/httptest" + "testing" + + "github.com/gocsaf/csaf/v3/internal/testutil" + "github.com/gocsaf/csaf/v3/util" +) + +func TestShaMarking(t *testing.T) { + tests := []struct { + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + }{ + { + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + }, + { + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, + }, + { + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, + }, + { + name: "only deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, + }, + { + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, + }, + { + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, + }, + } + + t.Parallel() + for _, testToRun := range tests { + test := testToRun + t.Run(test.name, func(tt *testing.T) { + tt.Parallel() + serverURL := "" + params := testutil.ProviderParams{ + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + } + server := httptest.NewTLSServer(testutil.ProviderHandler(&params, test.directoryProvider)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + cfg := config{} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + p, err := newProcessor(&cfg) + if err != nil { + t.Fatalf("could not init processor: %v", err) + } + p.client = client + + // TODO check result of processor + _, err = p.run([]string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) + } + p.close() + }) + } +} diff 
--git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index 1ae1524..d7eaae3 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -11,72 +11,16 @@ package main import ( "context" "errors" - "html/template" "log/slog" - "net/http" "net/http/httptest" "os" - "strings" "testing" "github.com/gocsaf/csaf/v3/internal/options" + "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) -type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool -} - -func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - path := "../../testdata/" - if directoryProvider { - path += "simple-directory-provider" - } else { - path += "simple-rolie-provider" - } - - path += r.URL.Path - - if strings.HasSuffix(r.URL.Path, "/") { - path += "index.html" - } - - content, err := os.ReadFile(path) - if err != nil { - w.WriteHeader(http.StatusNotFound) - return - } - switch { - case strings.HasSuffix(path, ".html"): - w.Header().Add("Content-Type", "text/html") - case strings.HasSuffix(path, ".json"): - w.Header().Add("Content-Type", "application/json") - case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: - w.WriteHeader(http.StatusNotFound) - return - case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: - w.WriteHeader(http.StatusNotFound) - return - default: - w.Header().Add("Content-Type", "text/plain") - } - - tmplt, err := template.New("base").Parse(string(content)) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - return - } - err = tmplt.Execute(w, params) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - return - } - }) -} - func checkIfFileExists(path string, t *testing.T) bool { if _, err := os.Stat(path); err == nil { return true @@ -169,12 +113,12 @@ func TestShaMarking(t *testing.T) { t.Run(test.name, func(tt *testing.T) { tt.Parallel() serverURL := "" - params := ProviderParams{ + params := testutil.ProviderParams{ URL: "", EnableSha256: test.enableSha256, EnableSha512: test.enableSha512, } - server := httptest.NewTLSServer(ProviderHandler(&params, test.directoryProvider)) + server := httptest.NewTLSServer(testutil.ProviderHandler(&params, test.directoryProvider)) defer server.Close() serverURL = server.URL diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go new file mode 100644 index 0000000..455d217 --- /dev/null +++ b/internal/testutil/testutil.go @@ -0,0 +1,73 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +// Package testutil contains shared helper functions for testing the application. +package testutil + +import ( + "html/template" + "net/http" + "os" + "strings" +) + +// ProviderParams configures the test provider. +type ProviderParams struct { + URL string + EnableSha256 bool + EnableSha512 bool +} + +// ProviderHandler returns a test provider handler with the specified configuration. 
+func ProviderHandler(params *ProviderParams, directoryProvider bool) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + path := "../../testdata/" + if directoryProvider { + path += "simple-directory-provider" + } else { + path += "simple-rolie-provider" + } + + path += r.URL.Path + + if strings.HasSuffix(r.URL.Path, "/") { + path += "index.html" + } + + content, err := os.ReadFile(path) + if err != nil { + w.WriteHeader(http.StatusNotFound) + return + } + switch { + case strings.HasSuffix(path, ".html"): + w.Header().Add("Content-Type", "text/html") + case strings.HasSuffix(path, ".json"): + w.Header().Add("Content-Type", "application/json") + case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: + w.WriteHeader(http.StatusNotFound) + return + case strings.HasSuffix(path, ".sha512") && directoryProvider && !params.EnableSha512: + w.WriteHeader(http.StatusNotFound) + return + default: + w.Header().Add("Content-Type", "text/plain") + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + err = tmplt.Execute(w, params) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + } + } +} From 5b6af7a4ad26bb53795e94fe3576a636b0b81df1 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 4 Dec 2024 17:52:00 +0100 Subject: [PATCH 131/235] WIP: Add requirement tests --- cmd/csaf_checker/processor_test.go | 106 +++++++++++++++++- testdata/simple-rolie-provider/service.json | 2 +- .../white/white-feed.json | 14 +-- 3 files changed, 112 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index b8b1b1f..73574bd 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -9,55 +9,150 @@ package main import ( + "fmt" "net/http/httptest" + "reflect" "testing" "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) +func getBaseRequirements(url string) []Requirement { + return []Requirement{ + { + Num: 1, + Description: "Valid CSAF documents", + Messages: []Message{{Type: 1, Text: "No remote validator configured"}, {Type: 0, Text: "All advisories validated fine against the schema."}}, + }, { + Num: 2, + Description: "Filename", + Messages: []Message{{Type: 0, Text: "All found filenames are conforming."}}}, + { + Num: 3, + Description: "TLS", + Messages: []Message{{Type: 0, Text: "All tested URLs were HTTPS."}}}, + { + Num: 4, + Description: "TLP:WHITE", + Messages: []Message{{Type: 0, Text: "All advisories labeled TLP:WHITE were freely accessible."}}}, + { + Num: 5, + Description: "TLP:AMBER and TLP:RED", + Messages: []Message{ + {Type: 0, Text: "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility."}}}, + { + Num: 6, + Description: "Redirects", + Messages: []Message{{Type: 0, Text: "No redirections found."}}}, + { + Num: 7, + Description: "provider-metadata.json", + Messages: []Message{{Type: 0, Text: "Found good provider metadata."}}}, + { + Num: 8, + Description: "security.txt", + Messages: []Message{{Type: 0, Text: "Performed no test of security.txt since the direct url of the provider-metadata.json was used."}}}, + { + Num: 9, + Description: "/.well-known/csaf/provider-metadata.json", + Messages: []Message{{Type: 0, Text: "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used."}}}, + { + Num: 10, + 
Description: "DNS path", + Messages: []Message{{Type: 0, Text: "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used."}}}, + { + Num: 11, + Description: "One folder per year", + Messages: []Message{{Type: 2, Text: fmt.Sprintf("No year folder found in %s/white/avendor-advisory-0004.json", url)}}}, + { + Num: 12, + Description: "index.txt", + Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/index.txt", url)}}}, + { + Num: 13, + Description: "changes.csv", + Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/changes.csv", url)}}}, + { + Num: 14, + Description: "Directory listings", + Messages: []Message{{Type: 0, Text: "All directory listings are valid."}}}, + { + Num: 15, + Description: "ROLIE feed", + Messages: []Message{{Type: 2, Text: "ROLIE feed based distribution was not used."}}}, + { + Num: 16, + Description: "ROLIE service document", + Messages: []Message{{Type: 1, Text: "No ROLIE service document found."}}}, + { + Num: 17, + Description: "ROLIE category document", + Messages: []Message{{Type: 1, Text: "No ROLIE category document found."}}}, + { + Num: 18, + Description: "Integrity", + Messages: []Message{{Type: 0, Text: "All checksums match."}}}, + { + Num: 19, + Description: "Signatures", + Messages: []Message{{Type: 0, Text: "All signatures verified."}}}, + { + Num: 20, + Description: "Public OpenPGP Key", + Messages: []Message{{Type: 0, Text: "1 public OpenPGP key(s) loaded."}}}, + } +} + func TestShaMarking(t *testing.T) { tests := []struct { name string directoryProvider bool enableSha256 bool enableSha512 bool + expected func(string) []Requirement }{ { name: "deliver sha256 and sha512", directoryProvider: false, enableSha256: true, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256", directoryProvider: false, enableSha256: true, enableSha512: false, + expected: getBaseRequirements, }, { name: "only deliver sha512", directoryProvider: false, enableSha256: false, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256 and sha512, directory provider", directoryProvider: true, enableSha256: true, enableSha512: true, + expected: getBaseRequirements, }, { name: "only deliver sha256, directory provider", directoryProvider: true, enableSha256: true, enableSha512: false, + expected: getBaseRequirements, }, { name: "only deliver sha512, directory provider", directoryProvider: true, enableSha256: false, enableSha512: true, + expected: getBaseRequirements, }, } @@ -92,11 +187,18 @@ func TestShaMarking(t *testing.T) { } p.client = client - // TODO check result of processor - _, err = p.run([]string{serverURL + "/provider-metadata.json"}) + report, err := p.run([]string{serverURL + "/provider-metadata.json"}) if err != nil { t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) } + expected := test.expected(serverURL) + for i, got := range report.Domains[0].Requirements { + want := expected[i] + if !reflect.DeepEqual(*got, want) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, want, *got) + } + } + p.close() }) } diff --git a/testdata/simple-rolie-provider/service.json b/testdata/simple-rolie-provider/service.json index 500d882..a398a40 100644 --- a/testdata/simple-rolie-provider/service.json +++ b/testdata/simple-rolie-provider/service.json @@ -6,7 +6,7 @@ "collection": [ { "title": "CSAF feed (TLP:WHITE)", - "href": "/white/white-feed.json", + "href": "{{.URL}}/white/white-feed.json", 
"categories": { "category": [ { diff --git a/testdata/simple-rolie-provider/white/white-feed.json b/testdata/simple-rolie-provider/white/white-feed.json index 1bc17bc..923a492 100644 --- a/testdata/simple-rolie-provider/white/white-feed.json +++ b/testdata/simple-rolie-provider/white/white-feed.json @@ -5,11 +5,11 @@ "link": [ { "rel": "self", - "href": "/white/csaf-feed-tlp-white.json" + "href": "{{.URL}}/white/csaf-feed-tlp-white.json" }, { "rel": "service", - "href": "/service.json" + "href": "{{.URL}}/service.json" } ], "category": [ @@ -26,30 +26,30 @@ "link": [ { "rel": "self", - "href": "/white/avendor-advisory-0004.json" + "href": "{{.URL}}/white/avendor-advisory-0004.json" }, {{if .EnableSha256}} { "rel": "hash", - "href": "/white/avendor-advisory-0004.json.sha256" + "href": "{{.URL}}/white/avendor-advisory-0004.json.sha256" }, {{end}} {{if .EnableSha512}} { "rel": "hash", - "href": "/white/avendor-advisory-0004.json.sha512" + "href": "{{.URL}}/white/avendor-advisory-0004.json.sha512" }, {{end}} { "rel": "signature", - "href": "/white/avendor-advisory-0004.json.asc" + "href": "{{.URL}}/white/avendor-advisory-0004.json.asc" } ], "published": "2020-01-01T00:00:00Z", "updated": "2020-01-01T00:00:00Z", "content": { "type": "application/json", - "src": "/avendor-advisory-0004.json" + "src": "{{.URL}}/avendor-advisory-0004.json" }, "format": { "schema": "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", From 68bd04676cc425dca87751bca989457baf5f56a1 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Dec 2024 13:11:07 +0100 Subject: [PATCH 132/235] Add requirement checker test data --- cmd/csaf_checker/processor_test.go | 147 +++++------- .../sha256-directory.json | 206 +++++++++++++++++ .../processor-requirements/sha256-rolie.json | 210 ++++++++++++++++++ .../sha256-sha512-directory.json | 206 +++++++++++++++++ .../sha256-sha512-rolie.json | 210 ++++++++++++++++++ .../sha512-directory.json | 207 +++++++++++++++++ .../processor-requirements/sha512-rolie.json | 210 ++++++++++++++++++ 7 files changed, 1299 insertions(+), 97 deletions(-) create mode 100644 testdata/processor-requirements/sha256-directory.json create mode 100644 testdata/processor-requirements/sha256-rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-directory.json create mode 100644 testdata/processor-requirements/sha256-sha512-rolie.json create mode 100644 testdata/processor-requirements/sha512-directory.json create mode 100644 testdata/processor-requirements/sha512-rolie.json diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 73574bd..c4fb532 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -9,99 +9,54 @@ package main import ( - "fmt" + "bytes" + "encoding/json" "net/http/httptest" + "os" "reflect" "testing" + "text/template" "github.com/gocsaf/csaf/v3/internal/testutil" "github.com/gocsaf/csaf/v3/util" ) -func getBaseRequirements(url string) []Requirement { - return []Requirement{ - { - Num: 1, - Description: "Valid CSAF documents", - Messages: []Message{{Type: 1, Text: "No remote validator configured"}, {Type: 0, Text: "All advisories validated fine against the schema."}}, - }, { - Num: 2, - Description: "Filename", - Messages: []Message{{Type: 0, Text: "All found filenames are conforming."}}}, - { - Num: 3, - Description: "TLS", - Messages: []Message{{Type: 0, Text: "All tested URLs were HTTPS."}}}, - { - Num: 4, - Description: "TLP:WHITE", - Messages: []Message{{Type: 0, Text: "All advisories 
labeled TLP:WHITE were freely accessible."}}}, - { - Num: 5, - Description: "TLP:AMBER and TLP:RED", - Messages: []Message{ - {Type: 0, Text: "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility."}}}, - { - Num: 6, - Description: "Redirects", - Messages: []Message{{Type: 0, Text: "No redirections found."}}}, - { - Num: 7, - Description: "provider-metadata.json", - Messages: []Message{{Type: 0, Text: "Found good provider metadata."}}}, - { - Num: 8, - Description: "security.txt", - Messages: []Message{{Type: 0, Text: "Performed no test of security.txt since the direct url of the provider-metadata.json was used."}}}, - { - Num: 9, - Description: "/.well-known/csaf/provider-metadata.json", - Messages: []Message{{Type: 0, Text: "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used."}}}, - { - Num: 10, - Description: "DNS path", - Messages: []Message{{Type: 0, Text: "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used."}}}, - { - Num: 11, - Description: "One folder per year", - Messages: []Message{{Type: 2, Text: fmt.Sprintf("No year folder found in %s/white/avendor-advisory-0004.json", url)}}}, - { - Num: 12, - Description: "index.txt", - Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/index.txt", url)}}}, - { - Num: 13, - Description: "changes.csv", - Messages: []Message{{Type: 0, Text: fmt.Sprintf("Found %s/white/changes.csv", url)}}}, - { - Num: 14, - Description: "Directory listings", - Messages: []Message{{Type: 0, Text: "All directory listings are valid."}}}, - { - Num: 15, - Description: "ROLIE feed", - Messages: []Message{{Type: 2, Text: "ROLIE feed based distribution was not used."}}}, - { - Num: 16, - Description: "ROLIE service document", - Messages: []Message{{Type: 1, Text: "No ROLIE service document found."}}}, - { - Num: 17, - Description: "ROLIE category document", - Messages: []Message{{Type: 1, Text: "No ROLIE category document found."}}}, - { - Num: 18, - Description: "Integrity", - Messages: []Message{{Type: 0, Text: "All checksums match."}}}, - { - Num: 19, - Description: "Signatures", - Messages: []Message{{Type: 0, Text: "All signatures verified."}}}, - { - Num: 20, - Description: "Public OpenPGP Key", - Messages: []Message{{Type: 0, Text: "1 public OpenPGP key(s) loaded."}}}, +func getRequirementTestData(t *testing.T, params testutil.ProviderParams, directoryProvider bool) []Requirement { + path := "../../testdata/processor-requirements/" + if params.EnableSha256 { + path += "sha256-" } + if params.EnableSha512 { + path += "sha512-" + } + if directoryProvider { + path += "directory" + } else { + path += "rolie" + } + path += ".json" + + content, err := os.ReadFile(path) + if err != nil { + t.Fatal(err) + } + + tmplt, err := template.New("base").Parse(string(content)) + if err != nil { + t.Fatal(err) + } + + var output bytes.Buffer + err = tmplt.Execute(&output, params) + if err != nil { + t.Fatal(err) + } + var requirement []Requirement + err = json.Unmarshal(output.Bytes(), &requirement) + if err != nil { + t.Fatal(err) + } + return requirement } func TestShaMarking(t *testing.T) { @@ -110,49 +65,42 @@ func TestShaMarking(t *testing.T) { directoryProvider bool enableSha256 bool enableSha512 bool - expected func(string) []Requirement }{ { name: "deliver sha256 and sha512", directoryProvider: false, enableSha256: true, enableSha512: true, - expected: 
getBaseRequirements, }, { name: "only deliver sha256", directoryProvider: false, enableSha256: true, enableSha512: false, - expected: getBaseRequirements, }, { name: "only deliver sha512", directoryProvider: false, enableSha256: false, enableSha512: true, - expected: getBaseRequirements, }, { - name: "only deliver sha256 and sha512, directory provider", + name: "deliver sha256 and sha512, directory provider", directoryProvider: true, enableSha256: true, enableSha512: true, - expected: getBaseRequirements, }, { name: "only deliver sha256, directory provider", directoryProvider: true, enableSha256: true, enableSha512: false, - expected: getBaseRequirements, }, { name: "only deliver sha512, directory provider", directoryProvider: true, enableSha256: false, enableSha512: true, - expected: getBaseRequirements, }, } @@ -191,11 +139,16 @@ func TestShaMarking(t *testing.T) { if err != nil { t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err) } - expected := test.expected(serverURL) - for i, got := range report.Domains[0].Requirements { - want := expected[i] - if !reflect.DeepEqual(*got, want) { - t.Errorf("SHA marking %v: Expected %v, got %v", test.name, want, *got) + expected := getRequirementTestData(t, + testutil.ProviderParams{ + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + }, + test.directoryProvider) + for i, want := range report.Domains[0].Requirements { + if !reflect.DeepEqual(expected[i], *want) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, *want, expected[i]) } } diff --git a/testdata/processor-requirements/sha256-directory.json b/testdata/processor-requirements/sha256-directory.json new file mode 100644 index 0000000..a106977 --- /dev/null +++ b/testdata/processor-requirements/sha256-directory.json @@ -0,0 +1,206 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] \ No newline at end of file diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha256-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." 
+ } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-directory.json b/testdata/processor-requirements/sha256-sha512-directory.json new file mode 100644 index 0000000..3e30b9a --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-directory.json @@ -0,0 +1,206 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." 
+ } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." 
+ } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha512-directory.json b/testdata/processor-requirements/sha512-directory.json new file mode 100644 index 0000000..e47e1f9 --- /dev/null +++ b/testdata/processor-requirements/sha512-directory.json @@ -0,0 +1,207 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." 
+ } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] + diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json new file mode 100644 index 0000000..5875174 --- /dev/null +++ b/testdata/processor-requirements/sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." 
+ } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] From df65ad13cbd222d2a2b1784287bd9e2e8b22ba7b Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 10 Dec 2024 10:13:42 +0100 Subject: [PATCH 133/235] Fix: return correct exit code --- cmd/csaf_validator/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 9e844b7..1a34be0 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -150,7 +150,7 @@ func run(opts *options, files []string) error { } // Exit code is based on validation results - os.Exit(exitCodeAllValid) + os.Exit(exitCode) return nil } From fc404e499c90ead7643bbbbba4b75855bdbfe938 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 13 Dec 2024 13:33:22 +0100 Subject: [PATCH 134/235] Unfix: Add should-states --- testdata/processor-requirements/sha256-directory.json | 2 +- testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-directory.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/testdata/processor-requirements/sha256-directory.json b/testdata/processor-requirements/sha256-directory.json index a106977..46b4049 100644 --- a/testdata/processor-requirements/sha256-directory.json +++ b/testdata/processor-requirements/sha256-directory.json @@ -178,7 +178,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 0, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 5875174..61c5ccf 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 1, + "type": 0, "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-directory.json b/testdata/processor-requirements/sha512-directory.json index e47e1f9..5102fab 100644 --- a/testdata/processor-requirements/sha512-directory.json +++ b/testdata/processor-requirements/sha512-directory.json @@ -178,7 +178,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 0, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 5875174..61c5ccf 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 1, + "type": 0, "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" } ] From a3d6d6acfb3fed53967ae8c024ddc2b565bd284b Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 14:26:00 +0100 Subject: [PATCH 135/235] Downgrade error to info in directory hash fetching --- cmd/csaf_checker/processor.go | 9 +++++++-- cmd/csaf_checker/processor_test.go | 6 +++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7972e2b..eed561a 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -776,8 +776,13 @@ func (p *processor) integrity( continue } if res.StatusCode != http.StatusOK { - p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) + if f.IsDirectory() { + p.badIntegrities.info("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status) + } else { + p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status) + } continue } h, err := func() ([]byte, error) { diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index c4fb532..ea5aed5 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -146,9 +146,9 @@ func TestShaMarking(t *testing.T) { EnableSha512: test.enableSha512, }, test.directoryProvider) - for i, want := range report.Domains[0].Requirements { - if !reflect.DeepEqual(expected[i], *want) { - t.Errorf("SHA marking %v: Expected %v, got %v", test.name, *want, expected[i]) + for i, got := range report.Domains[0].Requirements { + if !reflect.DeepEqual(expected[i], *got) { + t.Errorf("SHA marking %v: Expected %v, got %v", test.name, expected[i], *got) } } From ebd96011fcfd38a6a6c8c82ab2a9e99d8aee3f8c Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 14:38:49 +0100 Subject: [PATCH 136/235] Revert new requirement 17 test Changing the ROLIE category fetching warning to info can be addressed later. --- testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 61c5ccf..5875174 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 0, + "type": 1, "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" } ] diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 61c5ccf..5875174 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -172,7 +172,7 @@ "description": "ROLIE category document", "messages": [ { - "type": 0, + "type": 1, "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" } ] From 9dd4b7fc8dca06e7eb87e54da60680fd4f8a6b41 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Dec 2024 15:54:39 +0100 Subject: [PATCH 137/235] Add tests for no hash given or available --- cmd/csaf_checker/processor.go | 10 +- cmd/csaf_checker/processor_test.go | 101 ++++++--- internal/testutil/testutil.go | 10 +- .../processor-requirements/directory.json | 210 +++++++++++++++++ testdata/processor-requirements/rolie.json | 210 +++++++++++++++++ ...256-sha512-forbid-hash-fetching-rolie.json | 214 ++++++++++++++++++ 6 files changed, 711 insertions(+), 44 deletions(-) create mode 100644 testdata/processor-requirements/directory.json create mode 100644 testdata/processor-requirements/rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index eed561a..b913864 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -20,7 +20,6 @@ import ( "fmt" "io" "log" - "log/slog" "net/http" "net/url" "path/filepath" @@ -586,14 +585,11 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { switch { case sha256 == "" && sha512 == "": - slog.Error("No hash listed on ROLIE feed", "file", url) - return + p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url) case sign == "": - slog.Error("No signature listed on ROLIE feed", "file", url) - return - default: - file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} + p.badROLIEFeed.error("No signature listed on ROLIE feed %s", url) } + file = csaf.PlainAdvisoryFile{Path: url, SHA256: sha256, SHA512: sha512, Sign: sign} files = append(files, file) }) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index ea5aed5..5b0241e 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -29,6 +29,9 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct if params.EnableSha512 { path += "sha512-" } + if params.ForbidHashFetching { + path += "forbid-hash-fetching-" + } if directoryProvider { path += "directory" } else { @@ -61,46 +64,74 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct func TestShaMarking(t *testing.T) { tests := []struct { - name string - directoryProvider bool - enableSha256 bool - enableSha512 bool + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + forbidHashFetching bool }{ { - name: "deliver sha256 and sha512", - directoryProvider: false, - enableSha256: true, - enableSha512: true, + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha256", - directoryProvider: false, - enableSha256: true, - enableSha512: false, + name: "enable sha256 and sha512, forbid fetching", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidHashFetching: true, }, { - name: "only deliver sha512", - directoryProvider: false, - enableSha256: false, - enableSha512: true, + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, + forbidHashFetching: false, }, { - name: "deliver sha256 and sha512, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: true, + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: 
true, + forbidHashFetching: false, }, { - name: "only deliver sha256, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: false, + name: "deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, + forbidHashFetching: false, }, { - name: "only deliver sha512, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: true, + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, + forbidHashFetching: false, + }, + { + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, + forbidHashFetching: false, + }, + { + name: "no hash", + directoryProvider: false, + enableSha256: false, + enableSha512: false, + forbidHashFetching: false, + }, + { + name: "no hash, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: false, + forbidHashFetching: false, }, } @@ -111,9 +142,10 @@ func TestShaMarking(t *testing.T) { tt.Parallel() serverURL := "" params := testutil.ProviderParams{ - URL: "", - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidHashFetching: test.forbidHashFetching, } server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() @@ -141,9 +173,10 @@ func TestShaMarking(t *testing.T) { } expected := getRequirementTestData(t, testutil.ProviderParams{ - URL: serverURL, - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidHashFetching: test.forbidHashFetching, }, test.directoryProvider) for i, got := range report.Domains[0].Requirements { diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index 455d217..e933742 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,9 +18,10 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidHashFetching bool } // ProviderHandler returns a test provider handler with the specified configuration. @@ -49,6 +50,9 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): w.Header().Add("Content-Type", "application/json") + case (strings.HasSuffix(path, ".sha256") || strings.HasSuffix(path, ".sha512")) && params.ForbidHashFetching: + w.WriteHeader(http.StatusForbidden) + return case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: w.WriteHeader(http.StatusNotFound) return diff --git a/testdata/processor-requirements/directory.json b/testdata/processor-requirements/directory.json new file mode 100644 index 0000000..ed61fcc --- /dev/null +++ b/testdata/processor-requirements/directory.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." 
+ } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/index.txt" + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 0, + "text": "Found {{.URL}}/white/changes.csv" + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 0, + "text": "All directory listings are valid." + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "ROLIE feed based distribution was not used." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 1, + "text": "No ROLIE service document found." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "No ROLIE category document found." + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 404 (404 Not Found)" + }, + { + "type": 0, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] \ No newline at end of file diff --git a/testdata/processor-requirements/rolie.json b/testdata/processor-requirements/rolie.json new file mode 100644 index 0000000..cd65a7e --- /dev/null +++ b/testdata/processor-requirements/rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 2, + "text": "No hash listed on ROLIE feed {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. 
Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 0, + "text": "All checksums match." + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json new file mode 100644 index 0000000..03359f0 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json @@ -0,0 +1,214 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." 
+ } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" + }, + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] From b1a76207636a7c312c94344b44546116f31c5641 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 12:23:10 +0100 Subject: [PATCH 138/235] Extend processor SHA fetching tests Allow to forbid individual hashes from downloading. This allows to for testing the behavior, if one of the hashes could not be downloaded. --- cmd/csaf_checker/processor_test.go | 119 +++++++++--------- internal/testutil/testutil.go | 14 ++- ...12-forbid-sha256-forbid-sha512-rolie.json} | 0 3 files changed, 68 insertions(+), 65 deletions(-) rename testdata/processor-requirements/{sha256-sha512-forbid-hash-fetching-rolie.json => sha256-sha512-forbid-sha256-forbid-sha512-rolie.json} (100%) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 5b0241e..9e3f112 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -29,8 +29,11 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct if params.EnableSha512 { path += "sha512-" } - if params.ForbidHashFetching { - path += "forbid-hash-fetching-" + if params.ForbidSha256 { + path += "forbid-sha256-" + } + if params.ForbidSha512 { + path += "forbid-sha512-" } if directoryProvider { path += "directory" @@ -64,74 +67,68 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct func TestShaMarking(t *testing.T) { tests := []struct { - name string - directoryProvider bool - enableSha256 bool - enableSha512 bool - forbidHashFetching bool + name string + directoryProvider bool + enableSha256 bool + enableSha512 bool + forbidSha256 bool + forbidSha512 bool }{ { - name: "deliver sha256 and sha512", - directoryProvider: false, - enableSha256: true, - enableSha512: true, - forbidHashFetching: false, + name: "deliver sha256 and sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, }, { - name: "enable sha256 and sha512, forbid fetching", - directoryProvider: false, - enableSha256: true, - enableSha512: true, - forbidHashFetching: true, + name: "enable sha256 and sha512, forbid fetching", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: true, + forbidSha512: true, }, { - name: "only deliver sha256", - directoryProvider: false, - enableSha256: true, - enableSha512: false, - forbidHashFetching: false, + name: "only deliver sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: false, }, { - name: "only deliver sha512", - directoryProvider: false, - enableSha256: false, - enableSha512: true, - 
forbidHashFetching: false, + name: "only deliver sha512", + directoryProvider: false, + enableSha256: false, + enableSha512: true, }, { - name: "deliver sha256 and sha512, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: true, - forbidHashFetching: false, + name: "deliver sha256 and sha512, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: true, }, { - name: "only deliver sha256, directory provider", - directoryProvider: true, - enableSha256: true, - enableSha512: false, - forbidHashFetching: false, + name: "only deliver sha256, directory provider", + directoryProvider: true, + enableSha256: true, + enableSha512: false, }, { - name: "only deliver sha512, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: true, - forbidHashFetching: false, + name: "only deliver sha512, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: true, }, { - name: "no hash", - directoryProvider: false, - enableSha256: false, - enableSha512: false, - forbidHashFetching: false, + name: "no hash", + directoryProvider: false, + enableSha256: false, + enableSha512: false, }, { - name: "no hash, directory provider", - directoryProvider: true, - enableSha256: false, - enableSha512: false, - forbidHashFetching: false, + name: "no hash, directory provider", + directoryProvider: true, + enableSha256: false, + enableSha512: false, }, } @@ -142,10 +139,11 @@ func TestShaMarking(t *testing.T) { tt.Parallel() serverURL := "" params := testutil.ProviderParams{ - URL: "", - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, - ForbidHashFetching: test.forbidHashFetching, + URL: "", + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidSha256: test.forbidSha256, + ForbidSha512: test.forbidSha512, } server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider)) defer server.Close() @@ -173,10 +171,11 @@ func TestShaMarking(t *testing.T) { } expected := getRequirementTestData(t, testutil.ProviderParams{ - URL: serverURL, - EnableSha256: test.enableSha256, - EnableSha512: test.enableSha512, - ForbidHashFetching: test.forbidHashFetching, + URL: serverURL, + EnableSha256: test.enableSha256, + EnableSha512: test.enableSha512, + ForbidSha256: test.forbidSha256, + ForbidSha512: test.forbidSha512, }, test.directoryProvider) for i, got := range report.Domains[0].Requirements { diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index e933742..c7bad68 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,10 +18,11 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool - ForbidHashFetching bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidSha256 bool + ForbidSha512 bool } // ProviderHandler returns a test provider handler with the specified configuration. 
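As a side note on the fixture-naming scheme used above: `getRequirementTestData` selects the requirement test data file by concatenating one marker per enabled option, which is why the renamed fixture carries both `forbid-sha256-` and `forbid-sha512-`. A minimal sketch of that convention follows; the helper name `fixtureName` is illustrative only and not part of the repository.

```go
package main

import "fmt"

// fixtureName mirrors the path construction in getRequirementTestData:
// one marker per enabled option, followed by the provider kind.
func fixtureName(sha256, sha512, forbidSha256, forbidSha512, directory bool) string {
	name := ""
	if sha256 {
		name += "sha256-"
	}
	if sha512 {
		name += "sha512-"
	}
	if forbidSha256 {
		name += "forbid-sha256-"
	}
	if forbidSha512 {
		name += "forbid-sha512-"
	}
	if directory {
		name += "directory"
	} else {
		name += "rolie"
	}
	return name + ".json"
}

func main() {
	// Both hashes enabled, both forbidden, ROLIE provider:
	fmt.Println(fixtureName(true, true, true, true, false))
	// Expected: sha256-sha512-forbid-sha256-forbid-sha512-rolie.json
}
```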
@@ -50,7 +51,10 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): w.Header().Add("Content-Type", "application/json") - case (strings.HasSuffix(path, ".sha256") || strings.HasSuffix(path, ".sha512")) && params.ForbidHashFetching: + case (strings.HasSuffix(path, ".sha256")) && params.ForbidSha256: + w.WriteHeader(http.StatusForbidden) + return + case strings.HasSuffix(path, ".sha512") && params.ForbidSha512: w.WriteHeader(http.StatusForbidden) return case strings.HasSuffix(path, ".sha256") && directoryProvider && !params.EnableSha256: diff --git a/testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-forbid-sha512-rolie.json similarity index 100% rename from testdata/processor-requirements/sha256-sha512-forbid-hash-fetching-rolie.json rename to testdata/processor-requirements/sha256-sha512-forbid-sha256-forbid-sha512-rolie.json From d38150c6a0d334300dfb3391964ea051c66aa4ce Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 12:57:28 +0100 Subject: [PATCH 139/235] Add testdata for individual hash forbidden tests --- cmd/csaf_checker/processor_test.go | 16 ++ .../sha256-sha512-forbid-sha256-rolie.json | 210 ++++++++++++++++++ .../sha256-sha512-forbid-sha512-rolie.json | 210 ++++++++++++++++++ 3 files changed, 436 insertions(+) create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json create mode 100644 testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 9e3f112..0710f32 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -88,6 +88,22 @@ func TestShaMarking(t *testing.T) { forbidSha256: true, forbidSha512: true, }, + { + name: "enable sha256 and sha512, forbid sha256", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: true, + forbidSha512: false, + }, + { + name: "enable sha256 and sha512, forbid sha512", + directoryProvider: false, + enableSha256: true, + enableSha512: true, + forbidSha256: false, + forbidSha512: true, + }, { name: "only deliver sha256", directoryProvider: false, diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json new file mode 100644 index 0000000..2a1f2a8 --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." + } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." 
+ } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." + } + ] + } +] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json new file mode 100644 index 0000000..2a4c98f --- /dev/null +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json @@ -0,0 +1,210 @@ +[ + { + "num": 1, + "description": "Valid CSAF documents", + "messages": [ + { + "type": 1, + "text": "No remote validator configured" + }, + { + "type": 0, + "text": "All advisories validated fine against the schema." + } + ] + }, + { + "num": 2, + "description": "Filename", + "messages": [ + { + "type": 0, + "text": "All found filenames are conforming." 
+ } + ] + }, + { + "num": 3, + "description": "TLS", + "messages": [ + { + "type": 0, + "text": "All tested URLs were HTTPS." + } + ] + }, + { + "num": 4, + "description": "TLP:WHITE", + "messages": [ + { + "type": 0, + "text": "All advisories labeled TLP:WHITE were freely accessible." + } + ] + }, + { + "num": 5, + "description": "TLP:AMBER and TLP:RED", + "messages": [ + { + "type": 0, + "text": "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility." + } + ] + }, + { + "num": 6, + "description": "Redirects", + "messages": [ + { + "type": 0, + "text": "No redirections found." + } + ] + }, + { + "num": 7, + "description": "provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Found good provider metadata." + } + ] + }, + { + "num": 8, + "description": "security.txt", + "messages": [ + { + "type": 0, + "text": "Performed no test of security.txt since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 9, + "description": "/.well-known/csaf/provider-metadata.json", + "messages": [ + { + "type": 0, + "text": "Performed no test on whether the provider-metadata.json is available under the .well-known path since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 10, + "description": "DNS path", + "messages": [ + { + "type": 0, + "text": "Performed no test on the contents of https://csaf.data.security.DOMAIN since the direct url of the provider-metadata.json was used." + } + ] + }, + { + "num": 11, + "description": "One folder per year", + "messages": [ + { + "type": 2, + "text": "No year folder found in {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 12, + "description": "index.txt", + "messages": [ + { + "type": 2, + "text": "Fetching index.txt failed: {{.URL}}/index.txt not found." + } + ] + }, + { + "num": 13, + "description": "changes.csv", + "messages": [ + { + "type": 2, + "text": "Fetching changes.csv failed: {{.URL}}/changes.csv not found." + } + ] + }, + { + "num": 14, + "description": "Directory listings", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/ failed. Status code 404 (404 Not Found)" + }, + { + "type": 2, + "text": "Not listed advisories: {{.URL}}/white/avendor-advisory-0004.json" + } + ] + }, + { + "num": 15, + "description": "ROLIE feed", + "messages": [ + { + "type": 0, + "text": "All checked ROLIE feeds validated fine." + } + ] + }, + { + "num": 16, + "description": "ROLIE service document", + "messages": [ + { + "type": 0, + "text": "ROLIE service document validated fine." + } + ] + }, + { + "num": 17, + "description": "ROLIE category document", + "messages": [ + { + "type": 1, + "text": "Fetching {{.URL}}/white/category-white.json failed. Status code 404 (404 Not Found)" + } + ] + }, + { + "num": 18, + "description": "Integrity", + "messages": [ + { + "type": 2, + "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" + } + ] + }, + { + "num": 19, + "description": "Signatures", + "messages": [ + { + "type": 0, + "text": "All signatures verified." + } + ] + }, + { + "num": 20, + "description": "Public OpenPGP Key", + "messages": [ + { + "type": 0, + "text": "1 public OpenPGP key(s) loaded." 
+ } + ] + } +] From bc5d149f74d2ce5e7ed03316141a31eafbd80ea1 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 16 Dec 2024 19:28:24 +0100 Subject: [PATCH 140/235] Use exit code 1 for general errors, fix documentation --- cmd/csaf_validator/main.go | 2 +- docs/csaf_validator.md | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 1a34be0..346180b 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -23,7 +23,7 @@ import ( ) const ( - exitCodeSchemaInvalid = 1 << iota + exitCodeSchemaInvalid = 2 << iota exitCodeNoRemoteValidator exitCodeFailedRemoteValidation exitCodeAllValid = 0 diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index 64ded6d..a0e00bb 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -5,9 +5,10 @@ is a tool to validate local advisories files against the JSON Schema and an opti ### Exit codes If no fatal error occurs the program will exit with an exit code `n` with the following conditions: - `n == 0`: all valid -- `(n / 2) % 1 == 1`: schema validation failed -- `(n / 4) % 1 == 1`: no remote validator configured -- `(n / 8) % 1 == 1`: failure in remote validation +- `(n & 1) > 0`: general error, see logs +- `(n & 2) > 0`: schema validation failed +- `(n & 4) > 0`: no remote validator configured +- `(n & 8) > 0`: failure in remote validation ### Usage From 95ff418a270d618ffc2b6fb661e702cf7639d75f Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Wed, 18 Dec 2024 08:55:48 +0100 Subject: [PATCH 141/235] fix: Content-Type header for JSON responses * Remove `charset=utf-8` parameter, which is not allowed for JSON, according to rfc8259. --- cmd/csaf_provider/controller.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_provider/controller.go b/cmd/csaf_provider/controller.go index 7f64fe2..f04b7bd 100644 --- a/cmd/csaf_provider/controller.go +++ b/cmd/csaf_provider/controller.go @@ -174,7 +174,7 @@ func (c *controller) web( // writeJSON sets the header for the response and writes the JSON encoding of the given "content". // It logs out an error message in case of an error. 
func writeJSON(rw http.ResponseWriter, content any, code int) { - rw.Header().Set("Content-type", "application/json; charset=utf-8") + rw.Header().Set("Content-type", "application/json") rw.Header().Set("X-Content-Type-Options", "nosniff") rw.WriteHeader(code) if err := json.NewEncoder(rw).Encode(content); err != nil { From d8e903587a8744b51227da17867505da75a44c41 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 18 Dec 2024 15:37:58 +0100 Subject: [PATCH 142/235] Warn only if the other hash could be fetched --- cmd/csaf_checker/processor.go | 28 +++++++++++++------ .../sha256-sha512-forbid-sha256-rolie.json | 2 +- .../sha256-sha512-forbid-sha512-rolie.json | 2 +- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index b913864..224e225 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -757,6 +757,9 @@ func (p *processor) integrity( hashes = append(hashes, hash{"SHA512", f.SHA512URL, s512.Sum(nil)}) } + couldFetchHash := false + hashFetchErrors := []string{} + for _, x := range hashes { hu, err := url.Parse(x.url()) if err != nil { @@ -768,19 +771,15 @@ func (p *processor) integrity( p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { - p.badIntegrities.error("Fetching %s failed: %v.", hashFile, err) + hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: %v.", hashFile, err)) continue } if res.StatusCode != http.StatusOK { - if f.IsDirectory() { - p.badIntegrities.info("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) - } else { - p.badIntegrities.error("Fetching %s failed: Status code %d (%s)", - hashFile, res.StatusCode, res.Status) - } + hashFetchErrors = append(hashFetchErrors, fmt.Sprintf("Fetching %s failed: Status code %d (%s)", + hashFile, res.StatusCode, res.Status)) continue } + couldFetchHash = true h, err := func() ([]byte, error) { defer res.Body.Close() return util.HashFromReader(res.Body) @@ -798,6 +797,19 @@ func (p *processor) integrity( x.ext, u, hashFile) } } + + msgType := ErrorType + // Log only as warning, if the other hash could be fetched + if couldFetchHash { + msgType = WarnType + } + if f.IsDirectory() { + msgType = InfoType + } + for _, fetchError := range hashFetchErrors { + p.badIntegrities.add(msgType, fetchError) + } + // Check signature su, err := url.Parse(f.SignURL()) if err != nil { diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json index 2a1f2a8..72a173a 100644 --- a/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha256-rolie.json @@ -182,7 +182,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 1, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha256 failed: Status code 403 (403 Forbidden)" } ] diff --git a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json index 2a4c98f..1ab8f1e 100644 --- a/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json +++ b/testdata/processor-requirements/sha256-sha512-forbid-sha512-rolie.json @@ -182,7 +182,7 @@ "description": "Integrity", "messages": [ { - "type": 2, + "type": 1, "text": "Fetching {{.URL}}/white/avendor-advisory-0004.json.sha512 failed: Status code 403 (403 Forbidden)" } ] From 
8fc7f5bfad0c6022cbcc07cec36b875cb4ad292e Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 7 Jan 2025 12:23:40 +0100 Subject: [PATCH 143/235] Make documentation more explicit --- docs/csaf_validator.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/csaf_validator.md b/docs/csaf_validator.md index a0e00bb..87ec831 100644 --- a/docs/csaf_validator.md +++ b/docs/csaf_validator.md @@ -3,9 +3,11 @@ is a tool to validate local advisories files against the JSON Schema and an optional remote validator. ### Exit codes + If no fatal error occurs the program will exit with an exit code `n` with the following conditions: + - `n == 0`: all valid -- `(n & 1) > 0`: general error, see logs +- `(n & 1) > 0`: a general error occurred, all other flags are unset (see logs for more information) - `(n & 2) > 0`: schema validation failed - `(n & 4) > 0`: no remote validator configured - `(n & 8) > 0`: failure in remote validation From b8a5fa72d5d164b5996ec068de9c5e5e9bac15c5 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 8 Jan 2025 08:49:42 +0100 Subject: [PATCH 144/235] Fix nil check in downloader --- cmd/csaf_downloader/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index c8d92c1..ba6ccff 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -133,7 +133,7 @@ func (d *downloader) httpClient() util.Client { client := util.Client(&hClient) // Overwrite for testing purposes - if client != nil { + if d.client != nil { client = *d.client } From 9275a37a9faa07943b326ebded09559ef36a1084 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 8 Jan 2025 08:50:30 +0100 Subject: [PATCH 145/235] Format --- cmd/csaf_downloader/downloader.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index ba6ccff..88a63c2 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -514,7 +514,8 @@ nextAdvisory: url: file.SHA512URL(), warn: true, hashType: algSha512, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512))}) + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)), + }) } else { slog.Info("SHA512 not present") } @@ -523,7 +524,8 @@ nextAdvisory: url: file.SHA256URL(), warn: true, hashType: algSha256, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256))}) + preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)), + }) } else { slog.Info("SHA256 not present") } From b6721e1d5ad3b2f4f4d6d37501a4b74cd665a2bd Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 10 Jan 2025 11:42:54 +0100 Subject: [PATCH 146/235] Add check for missing either sha256 or sha512 hashes only --- cmd/csaf_checker/processor.go | 4 ++++ testdata/processor-requirements/sha256-rolie.json | 2 +- testdata/processor-requirements/sha512-rolie.json | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 224e225..5c4f66e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -584,6 +584,10 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { var file csaf.AdvisoryFile switch { + case sha256 == "" && sha512 != "": + p.badROLIEFeed.info("%s has no sha256 hash file listed", url) + case sha256 != "" && sha512 == "": + p.badROLIEFeed.info("%s has no 
sha512 hash file listed", url) case sha256 == "" && sha512 == "": p.badROLIEFeed.error("No hash listed on ROLIE feed %s", url) case sign == "": diff --git a/testdata/processor-requirements/sha256-rolie.json b/testdata/processor-requirements/sha256-rolie.json index 5875174..4ed47f1 100644 --- a/testdata/processor-requirements/sha256-rolie.json +++ b/testdata/processor-requirements/sha256-rolie.json @@ -153,7 +153,7 @@ "messages": [ { "type": 0, - "text": "All checked ROLIE feeds validated fine." + "text": "{{.URL}}/white/avendor-advisory-0004.json has no sha512 hash file listed" } ] }, diff --git a/testdata/processor-requirements/sha512-rolie.json b/testdata/processor-requirements/sha512-rolie.json index 5875174..a2a195d 100644 --- a/testdata/processor-requirements/sha512-rolie.json +++ b/testdata/processor-requirements/sha512-rolie.json @@ -153,7 +153,7 @@ "messages": [ { "type": 0, - "text": "All checked ROLIE feeds validated fine." + "text": "{{.URL}}/white/avendor-advisory-0004.json has no sha256 hash file listed" } ] }, From 028f468d6f25f2e47d96fb1a5d924d3e22ab5949 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 10:32:13 +0100 Subject: [PATCH 147/235] Fix typo in error message Closes #608 --- cmd/csaf_checker/reporters.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/reporters.go b/cmd/csaf_checker/reporters.go index 157eabe..9cd3fc8 100644 --- a/cmd/csaf_checker/reporters.go +++ b/cmd/csaf_checker/reporters.go @@ -178,7 +178,7 @@ func (r *tlpAmberRedReporter) report(p *processor, domain *Domain) { return } if len(p.badAmberRedPermissions) == 0 { - req.message(InfoType, "All tested advisories labeled TLP:WHITE or TLP:RED were access-protected.") + req.message(InfoType, "All tested advisories labeled TLP:AMBER or TLP:RED were access-protected.") return } req.Messages = p.badAmberRedPermissions From 59d2cef0826080f9bf7bd60332c15ec614153834 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 11:53:57 +0100 Subject: [PATCH 148/235] Fix typos --- cmd/csaf_validator/main.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 346180b..3250388 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -107,7 +107,7 @@ func run(opts *options, files []string) error { log.Printf("error: loading %q as JSON failed: %v\n", file, err) continue } - // Validate agsinst Schema. + // Validate against Schema. validationErrs, err := csaf.ValidateCSAF(doc) if err != nil { log.Printf("error: validating %q against schema failed: %v\n", @@ -124,7 +124,7 @@ func run(opts *options, files []string) error { fmt.Printf("%q passes the schema validation.\n", file) } - // Check filename agains ID + // Check filename against ID if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil { log.Printf("%s: %s.\n", file, err) continue From 6e8c2ecc059090865dd6bc48bc4ff0371757c8ee Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 23 Jan 2025 12:22:11 +0100 Subject: [PATCH 149/235] Check remote validator even if file validation fails This makes it consistent with the handling of schema validation. 
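For callers of `csaf_validator`, the exit-code bits documented above can be decoded independently of the tool. A minimal sketch, assuming only that a `csaf_validator` binary is available on `PATH`:

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Run the validator on the files given on the command line.
	cmd := exec.Command("csaf_validator", os.Args[1:]...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	code := 0
	if err := cmd.Run(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			code = exitErr.ExitCode()
		} else {
			fmt.Fprintf(os.Stderr, "could not run validator: %v\n", err)
			os.Exit(1)
		}
	}

	// Decode the documented exit-code bits.
	if code == 0 {
		fmt.Println("all files valid")
	}
	if code&1 != 0 {
		fmt.Println("general error, see validator logs")
	}
	if code&2 != 0 {
		fmt.Println("schema validation failed")
	}
	if code&4 != 0 {
		fmt.Println("no remote validator configured")
	}
	if code&8 != 0 {
		fmt.Println("remote validation failed")
	}
}
```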
--- cmd/csaf_validator/main.go | 1 - 1 file changed, 1 deletion(-) diff --git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index 3250388..b3a0855 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -127,7 +127,6 @@ func run(opts *options, files []string) error { // Check filename against ID if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil { log.Printf("%s: %s.\n", file, err) - continue } // Validate against remote validator. From 84026b682d80e1edcc3ca8a8346c69a7e8e56059 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Tue, 28 Jan 2025 17:41:54 +0100 Subject: [PATCH 150/235] Update README.md to exchange csaf.io until it is fixed --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 463b1d9..8f0c5f3 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ # csaf -Implements a [CSAF](https://csaf.io/) +Implements a [CSAF](https://oasis-open.github.io/csaf-documentation/) ([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html) and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html)) trusted provider, checker, aggregator and downloader. From 7d74543bbbf7cc3f5051f6fef3a84c97347d5eba Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:02:18 +0100 Subject: [PATCH 151/235] Fix: Now give errors if lookup methods fail, refactor accordingly --- cmd/csaf_checker/processor.go | 72 ++++++++++++++++------------------- 1 file changed, 33 insertions(+), 39 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 5d1b69b..e07f5ad 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1340,49 +1340,57 @@ func (p *processor) checkSecurityFolder(folder string) string { // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available // and serves the "provider-metadata.json". // It returns an empty string if all checks are passed, otherwise the errormessage. -func (p *processor) checkDNS(domain string) string { +func (p *processor) checkDNS(domain string) { + + p.badDNSPath.use() client := p.httpClient() path := "https://csaf.data.security." + domain res, err := client.Get(path) if err != nil { - return fmt.Sprintf("Fetching %s failed: %v", path, err) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Fetching %s failed: %v", path, err)) + return } if res.StatusCode != http.StatusOK { - return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status) - + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status)) } hash := sha256.New() defer res.Body.Close() content, err := io.ReadAll(res.Body) if err != nil { - return fmt.Sprintf("Error while reading the response from %s", path) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("Error while reading the response from %s", path)) } hash.Write(content) if !bytes.Equal(hash.Sum(nil), p.pmd256) { - return fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", path) + p.badDNSPath.add(ErrorType, + fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", + path)) } - return "" } -// checkWellknownMetadataReporter checks if the provider-metadata.json file is +// checkWellknown checks if the provider-metadata.json file is // available under the /.well-known/csaf/ directory. 
Returns the errormessage if // an error was encountered, or an empty string otherwise -func (p *processor) checkWellknown(domain string) string { +func (p *processor) checkWellknown(domain string) { + + p.badWellknownMetadata.use() client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" res, err := client.Get(path) if err != nil { - return fmt.Sprintf("Fetching %s failed: %v", path, err) + p.badWellknownMetadata.add(ErrorType, + fmt.Sprintf("Fetching %s failed: %v", path, err)) } if res.StatusCode != http.StatusOK { - return fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status) + p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status)) } - return "" } // checkWellknownSecurityDNS @@ -1401,50 +1409,36 @@ func (p *processor) checkWellknown(domain string) string { // In that case, errors are returned. func (p *processor) checkWellknownSecurityDNS(domain string) error { - warningsW := p.checkWellknown(domain) + p.checkWellknown(domain) + p.checkDNS(domain) + // Security check for well known (default) and legacy location - warningsS, sDMessage := p.checkSecurity(domain, false) + warnings, sDMessage := p.checkSecurity(domain, false) // if the security.txt under .well-known was not okay // check for a security.txt within its legacy location sLMessage := "" - if warningsS == 1 { - warningsS, sLMessage = p.checkSecurity(domain, true) + if warnings == 1 { + warnings, sLMessage = p.checkSecurity(domain, true) } - warningsD := p.checkDNS(domain) - p.badWellknownMetadata.use() p.badSecurity.use() - p.badDNSPath.use() - - var kind MessageType - if warningsS != 1 || warningsD == "" || warningsW == "" { - kind = WarnType - } else { - kind = ErrorType - } // Info, Warning or Error depending on kind and warningS - kindSD := kind - if warningsS == 0 { + kindSD := WarnType + if warnings == 0 { kindSD = InfoType } - kindSL := kind - if warningsS == 2 { + kindSL := ErrorType + if warnings == 2 { kindSL = InfoType } - if warningsW != "" { - p.badWellknownMetadata.add(kind, warningsW) - } p.badSecurity.add(kindSD, sDMessage) // only if the well-known security.txt was not successful: // report about the legacy location - if warningsS != 0 { + if warnings != 0 { p.badSecurity.add(kindSL, sLMessage) } - if warningsD != "" { - p.badDNSPath.add(kind, warningsD) - } return nil } From 02787b24b799113b769b9ce3bfaeeb66b435340e Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:26:59 +0100 Subject: [PATCH 152/235] Update comments, clean up security check --- cmd/csaf_checker/processor.go | 44 +++++++++++++++++------------------ 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e07f5ad..cb38bda 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1339,7 +1339,6 @@ func (p *processor) checkSecurityFolder(folder string) string { // checkDNS checks if the "csaf.data.security.domain.tld" DNS record is available // and serves the "provider-metadata.json". -// It returns an empty string if all checks are passed, otherwise the errormessage. func (p *processor) checkDNS(domain string) { p.badDNSPath.use() @@ -1373,8 +1372,7 @@ func (p *processor) checkDNS(domain string) { } // checkWellknown checks if the provider-metadata.json file is -// available under the /.well-known/csaf/ directory. 
Returns the errormessage if -// an error was encountered, or an empty string otherwise +// available under the /.well-known/csaf/ directory. func (p *processor) checkWellknown(domain string) { p.badWellknownMetadata.use() @@ -1402,15 +1400,13 @@ func (p *processor) checkWellknown(domain string) { // 4. Finally it checks if the "csaf.data.security.domain.tld" DNS record // is available and serves the "provider-metadata.json". // -// / -// If all three checks fail, errors are given, -// otherwise warnings for all failed checks. -// The function returns nil, unless errors outside the checks were found. -// In that case, errors are returned. +// For the security.txt checks, it first checks the default location. +// Should this lookup fail, a warning is will be given and a lookup +// for the legacy location will be made. If this fails as well, then an +// error is given. func (p *processor) checkWellknownSecurityDNS(domain string) error { p.checkWellknown(domain) - p.checkDNS(domain) // Security check for well known (default) and legacy location warnings, sDMessage := p.checkSecurity(domain, false) @@ -1423,22 +1419,24 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badSecurity.use() - // Info, Warning or Error depending on kind and warningS - kindSD := WarnType - if warnings == 0 { - kindSD = InfoType - } - kindSL := ErrorType - if warnings == 2 { - kindSL = InfoType + // Report about Securitytxt: + // Only report about Legacy if default was succesful (0). + // Report default and legacy as errors if neither was succesful (1). + // Warn about missing security in the default position if not found + // but found in the legacy location, and inform about finding it there (2). + switch warnings { + case 0: + p.badSecurity.add(InfoType, sDMessage) + case 1: + p.badSecurity.add(ErrorType, sDMessage) + p.badSecurity.add(ErrorType, sLMessage) + case 2: + p.badSecurity.add(WarnType, sDMessage) + p.badSecurity.add(InfoType, sLMessage) } - p.badSecurity.add(kindSD, sDMessage) - // only if the well-known security.txt was not successful: - // report about the legacy location - if warnings != 0 { - p.badSecurity.add(kindSL, sLMessage) - } + p.checkDNS(domain) + return nil } From 82a6929e4dd9aea3743cb905e415665825f0dc89 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 29 Jan 2025 09:41:16 +0100 Subject: [PATCH 153/235] Fix: Poor phrasing corrected --- cmd/csaf_checker/processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index cb38bda..d6f0f6b 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1420,7 +1420,7 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { p.badSecurity.use() // Report about Securitytxt: - // Only report about Legacy if default was succesful (0). + // Only report about default location if it was succesful (0). // Report default and legacy as errors if neither was succesful (1). // Warn about missing security in the default position if not found // but found in the legacy location, and inform about finding it there (2). From 6e02de974e537ace9cd08179225a715674f8f096 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 25 Feb 2025 15:03:38 +0100 Subject: [PATCH 154/235] update release workflow dependencies and so glibc * Update runner to ubuntu-22.04 which is the eldest to be supported by github from 2025-04-01. * Update github actions and go version needed. 
--- .github/workflows/release.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d1e370f..f77c9e3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,23 +7,23 @@ on: jobs: releases-matrix: name: Release Go binaries - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: '^1.21.0' + go-version: '^1.23.6' - name: Build run: make dist - name: Upload release assets - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: files: | dist/csaf-*.zip From a4a90f4f9274b295c27bfb6df255e6b2a5134f45 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 25 Feb 2025 15:07:34 +0100 Subject: [PATCH 155/235] update go version to 1.23 --- .github/workflows/itest.yml | 6 +++--- README.md | 2 +- docs/Development.md | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 9cc4c6b..b537b39 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -7,9 +7,9 @@ jobs: steps: - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: 1.22.0 + go-version: '^1.23.6' - name: Set up Node.js uses: actions/setup-node@v3 @@ -17,7 +17,7 @@ jobs: node-version: 16 - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Execute the scripts run: | diff --git a/README.md b/README.md index 8f0c5f3..b76bf95 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.22+) should be installed. [Go installation](https://go.dev/doc/install) +- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/docs/Development.md b/docs/Development.md index 5c4df22..bc71c2c 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.22 and 1.23). +the latest version of Go (currently 1.23 and 1.24). ## Generated files From 3afa8d8b2e908cba70bddde5442240cab5ec9bb9 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 25 Feb 2025 15:24:24 +0100 Subject: [PATCH 156/235] Upgrade to artifact action v4 --- .github/workflows/itest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index b537b39..8bc87d5 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -36,7 +36,7 @@ jobs: shell: bash - name: Upload test results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: checker-results path: | From e91bdec201822e1e334582a5dde0388e92d74994 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 3 Mar 2025 17:31:21 +0100 Subject: [PATCH 157/235] Add example for iterating product id and product helper (#617) * Add example for iterating product id and product helper * simplify code a bit * Remove newline --------- Co-authored-by: Sascha L. 
Teichmann --- examples/product_lister/main.go | 141 ++++++++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100644 examples/product_lister/main.go diff --git a/examples/product_lister/main.go b/examples/product_lister/main.go new file mode 100644 index 0000000..5ad26a9 --- /dev/null +++ b/examples/product_lister/main.go @@ -0,0 +1,141 @@ +// Package main implements a simple demo program to +// work with the csaf library. +package main + +import ( + "encoding/json" + "flag" + "fmt" + "log" + "os" + + "github.com/gocsaf/csaf/v3/csaf" +) + +func main() { + flag.Usage = func() { + if _, err := fmt.Fprintf(flag.CommandLine.Output(), + "Usage:\n %s [OPTIONS] files...\n\nOptions:\n", os.Args[0]); err != nil { + log.Fatalf("error: %v\n", err) + } + flag.PrintDefaults() + } + printProductIdentHelper := flag.Bool("print_ident_helper", false, "print product helper mapping") + flag.Parse() + + files := flag.Args() + if len(files) == 0 { + log.Println("No files given.") + return + } + + var printer func(*csaf.Advisory) error + if *printProductIdentHelper { + printer = printProductIdentHelperMapping + } else { + printer = printProductIDMapping + } + + if err := run(files, printer); err != nil { + log.Fatalf("error: %v\n", err) + } +} + +// visitFullProductNames iterates all full product names in the advisory. +func visitFullProductNames( + adv *csaf.Advisory, + visit func(*csaf.FullProductName), +) { + // Iterate over all full product names + if fpns := adv.ProductTree.FullProductNames; fpns != nil { + for _, fpn := range *fpns { + if fpn != nil && fpn.ProductID != nil { + visit(fpn) + } + } + } + + // Iterate over branches recursively + var recBranch func(b *csaf.Branch) + recBranch = func(b *csaf.Branch) { + if b == nil { + return + } + if fpn := b.Product; fpn != nil && fpn.ProductID != nil { + visit(fpn) + + } + for _, c := range b.Branches { + recBranch(c) + } + } + for _, b := range adv.ProductTree.Branches { + recBranch(b) + } + + // Iterate over relationships + if rels := adv.ProductTree.RelationShips; rels != nil { + for _, rel := range *rels { + if rel != nil { + if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil { + visit(fpn) + } + } + } + } +} + +// run applies fn to all loaded advisories. +func run(files []string, fn func(*csaf.Advisory) error) error { + for _, file := range files { + adv, err := csaf.LoadAdvisory(file) + if err != nil { + return fmt.Errorf("loading %q failed: %w", file, err) + } + if err := fn(adv); err != nil { + return err + } + } + return nil +} + +// printJSON serializes v as indented JSON to stdout. +func printJSON(v any) error { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(v) +} + +// printProductIDMapping prints all product ids with their name and identification helper. +func printProductIDMapping(adv *csaf.Advisory) error { + type productNameHelperMapping struct { + FullProductName *csaf.FullProductName `json:"product"` + ProductIdentificationHelper *csaf.ProductIdentificationHelper `json:"product_identification_helper"` + } + + productIDMap := map[csaf.ProductID][]productNameHelperMapping{} + visitFullProductNames(adv, func(fpn *csaf.FullProductName) { + productIDMap[*fpn.ProductID] = append(productIDMap[*fpn.ProductID], productNameHelperMapping{ + FullProductName: fpn, + ProductIdentificationHelper: fpn.ProductIdentificationHelper, + }) + }) + return printJSON(productIDMap) +} + +// printProductIdentHelperMapping prints all product identifier helper with their product id. 
+func printProductIdentHelperMapping(adv *csaf.Advisory) error { + type productIdentIDMapping struct { + ProductNameHelperMapping csaf.ProductIdentificationHelper `json:"product_identification_helper"` + ProductID *csaf.ProductID `json:"product_id"` + } + + productIdentMap := []productIdentIDMapping{} + visitFullProductNames(adv, func(fpn *csaf.FullProductName) { + productIdentMap = append(productIdentMap, productIdentIDMapping{ + ProductNameHelperMapping: *fpn.ProductIdentificationHelper, + ProductID: fpn.ProductID, + }) + }) + return printJSON(productIdentMap) +} From 24f9af7f26bf558ec92dedc86317a1267b169896 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 5 Mar 2025 09:55:11 +0100 Subject: [PATCH 158/235] Add documentation for externally signed documents Closes #607 --- docs/csaf_uploader.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/csaf_uploader.md b/docs/csaf_uploader.md index 0e68aa9..76af99f 100644 --- a/docs/csaf_uploader.md +++ b/docs/csaf_uploader.md @@ -43,6 +43,12 @@ E.g. uploading a csaf-document which asks to enter a password interactively. +To upload an already signed document, use the `-x` option +```bash +# Note: The file CSAF-document-1.json.asc must exist +./csaf_uploader -x -a upload -I -t white -u https://localhost/cgi-bin/csaf_provider.go CSAF-document-1.json +``` + By default csaf_uploader will try to load a config file from the following places: From ec0c3f9c2ca9a9080f876944ddac5f0a583b5b11 Mon Sep 17 00:00:00 2001 From: Marcus Perlick <38723273+marcusperlick@users.noreply.github.com> Date: Mon, 10 Mar 2025 09:24:49 +0100 Subject: [PATCH 159/235] Fix potential leak of HTTP response body in downloadJSON of csaf_aggregator (#618) --- cmd/csaf_aggregator/client.go | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_aggregator/client.go b/cmd/csaf_aggregator/client.go index 916baa5..abd475c 100644 --- a/cmd/csaf_aggregator/client.go +++ b/cmd/csaf_aggregator/client.go @@ -10,6 +10,7 @@ package main import ( "errors" + "fmt" "io" "net/http" @@ -20,13 +21,14 @@ var errNotFound = errors.New("not found") func downloadJSON(c util.Client, url string, found func(io.Reader) error) error { res, err := c.Get(url) - if err != nil || res.StatusCode != http.StatusOK || + if err != nil { + return fmt.Errorf("not found: %w", err) + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK || res.Header.Get("Content-Type") != "application/json" { // ignore this as it is expected. return errNotFound } - return func() error { - defer res.Body.Close() - return found(res.Body) - }() + return found(res.Body) } From 3cfafa8263112d79d489dbc170004fcf3498340b Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 10 Mar 2025 11:11:34 +0100 Subject: [PATCH 160/235] Report error in checker if content type is not correct Related: #606 --- cmd/csaf_checker/processor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index c0aafb2..397c88e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -678,9 +678,9 @@ func (p *processor) integrity( continue } - // Warn if we do not get JSON. + // Error if we do not get JSON. 
if ct := res.Header.Get("Content-Type"); ct != "application/json" { - lg(WarnType, + lg(ErrorType, "The content type of %s should be 'application/json' but is '%s'", u, ct) } From 534d6f049f9ed5cf54c75c8a2ede3a23511868f4 Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 10 Mar 2025 12:02:44 +0100 Subject: [PATCH 161/235] Add content-type error report test --- cmd/csaf_checker/processor_test.go | 53 ++++++++++++++++++++++++++++++ internal/testutil/testutil.go | 18 ++++++---- 2 files changed, 65 insertions(+), 6 deletions(-) diff --git a/cmd/csaf_checker/processor_test.go b/cmd/csaf_checker/processor_test.go index 0710f32..4d13908 100644 --- a/cmd/csaf_checker/processor_test.go +++ b/cmd/csaf_checker/processor_test.go @@ -14,6 +14,8 @@ import ( "net/http/httptest" "os" "reflect" + "slices" + "strings" "testing" "text/template" @@ -65,6 +67,57 @@ func getRequirementTestData(t *testing.T, params testutil.ProviderParams, direct return requirement } +func TestContentTypeReport(t *testing.T) { + serverURL := "" + params := testutil.ProviderParams{ + URL: "", + EnableSha256: true, + EnableSha512: true, + ForbidSha256: true, + ForbidSha512: true, + JSONContentType: "application/json; charset=utf-8", + } + server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, false)) + defer server.Close() + + serverURL = server.URL + params.URL = server.URL + + hClient := server.Client() + client := util.Client(hClient) + + cfg := config{} + err := cfg.prepare() + if err != nil { + t.Fatalf("SHA marking config failed: %v", err) + } + p, err := newProcessor(&cfg) + if err != nil { + t.Fatalf("could not init downloader: %v", err) + } + p.client = client + + report, err := p.run([]string{serverURL + "/provider-metadata.json"}) + if err != nil { + t.Errorf("Content-Type-Report: Expected no error, got: %v", err) + } + + got := report.Domains[0].Requirements + idx := slices.IndexFunc(got, func(e *Requirement) bool { + return e.Num == 7 + }) + if idx == -1 { + t.Error("Content-Type-Report: Could not find requirement") + } else { + message := got[idx].Messages[0] + if message.Type != ErrorType || !strings.Contains(message.Text, "should be 'application/json'") { + t.Errorf("Content-Type-Report: Content Type Error, got %v", message) + } + } + + p.close() +} + func TestShaMarking(t *testing.T) { tests := []struct { name string diff --git a/internal/testutil/testutil.go b/internal/testutil/testutil.go index c7bad68..a8186a4 100644 --- a/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go @@ -18,11 +18,12 @@ import ( // ProviderParams configures the test provider. type ProviderParams struct { - URL string - EnableSha256 bool - EnableSha512 bool - ForbidSha256 bool - ForbidSha512 bool + URL string + EnableSha256 bool + EnableSha512 bool + ForbidSha256 bool + ForbidSha512 bool + JSONContentType string } // ProviderHandler returns a test provider handler with the specified configuration. 
@@ -35,6 +36,11 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle path += "simple-rolie-provider" } + jsonContenType := "application/json" + if params.JSONContentType != "" { + jsonContenType = params.JSONContentType + } + path += r.URL.Path if strings.HasSuffix(r.URL.Path, "/") { @@ -50,7 +56,7 @@ func ProviderHandler(params *ProviderParams, directoryProvider bool) http.Handle case strings.HasSuffix(path, ".html"): w.Header().Add("Content-Type", "text/html") case strings.HasSuffix(path, ".json"): - w.Header().Add("Content-Type", "application/json") + w.Header().Add("Content-Type", jsonContenType) case (strings.HasSuffix(path, ".sha256")) && params.ForbidSha256: w.WriteHeader(http.StatusForbidden) return From 4429dd69857d59fe0ef2c6ca5a6974ac76062e50 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 13 Mar 2025 18:23:28 +0100 Subject: [PATCH 162/235] feat: add access-control-allow-origin header .. for better access from web applications. improve #479 --- docs/scripts/DNSConfigForItest.sh | 2 ++ docs/scripts/setupProviderForITest.sh | 11 +++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/scripts/DNSConfigForItest.sh b/docs/scripts/DNSConfigForItest.sh index f7b85f0..9196af3 100755 --- a/docs/scripts/DNSConfigForItest.sh +++ b/docs/scripts/DNSConfigForItest.sh @@ -28,6 +28,8 @@ echo " location = / { try_files /.well-known/csaf/provider-metadata.json =404; + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; } access_log /var/log/nginx/dns-domain_access.log; diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index f9d7d18..2b6e6d1 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -56,11 +56,14 @@ location /cgi-bin/ { sudo sed -i "/^server {/a\ include fcgiwrap.conf;" $NGINX_CONFIG_PATH echo " - # For atomic directory switches - disable_symlinks off; + # For atomic directory switches + disable_symlinks off; - # directory listings - autoindex on; + # directory listings + autoindex on; + + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; " > locationConfig.txt sudo sed -i "/^\s*location \/ {/r locationConfig.txt" $NGINX_CONFIG_PATH # Insert config inside location{} ./DNSConfigForItest.sh From 527fe71992797095f99e95c02f69711dc629e03d Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 13 Mar 2025 18:30:38 +0100 Subject: [PATCH 163/235] feat: set acao header * adapt provider-setup.md to changes for the acao header. --- docs/provider-setup.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 48c29d0..2fdf1e3 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -78,6 +78,9 @@ server { # directory listings autoindex on; + + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; } # enable CGI @@ -155,7 +158,7 @@ Again replacing `{clientCert.crt}` and `{clientKey.pem}` accordingly. 
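To verify the headers configured above from the client side, a small check of `Content-Type` (which, per RFC 8259, carries no `charset` parameter for JSON) and `Access-Control-Allow-Origin` could look like the following sketch; the URL is a placeholder.

```go
package main

import (
	"fmt"
	"net/http"
	"os"
)

func main() {
	// Placeholder URL; replace with the provider's metadata document.
	url := "https://example.com/.well-known/csaf/provider-metadata.json"
	if len(os.Args) > 1 {
		url = os.Args[1]
	}

	res, err := http.Get(url)
	if err != nil {
		fmt.Fprintf(os.Stderr, "fetching %s failed: %v\n", url, err)
		os.Exit(1)
	}
	defer res.Body.Close()

	// RFC 8259 defines no charset parameter for application/json,
	// so the checker expects the bare media type.
	if ct := res.Header.Get("Content-Type"); ct != "application/json" {
		fmt.Printf("unexpected Content-Type: %q\n", ct)
	}

	// The nginx snippets above add this header so web applications
	// on other origins can read the static documents.
	if acao := res.Header.Get("Access-Control-Allow-Origin"); acao != "*" {
		fmt.Printf("missing or restrictive Access-Control-Allow-Origin: %q\n", acao)
	}
}
```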
To let nginx resolves the DNS record `csaf.data.security.domain.tld` to fulfill the [Requirement 10](https://docs.oasis-open.org/csaf/csaf/v2.0/cs01/csaf-v2.0-cs01.html#7110-requirement-10-dns-path) configure a new server block (virtual host) in a separated file under `/etc/nginx/available-sites/{DNSNAME}` like following: - + ```sh server { From 8163f578511f417a0c1b9b4b58de8574b7916736 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Fri, 14 Mar 2025 10:05:56 +0100 Subject: [PATCH 164/235] Compare changes dates (#609) * Feat: Compare dates in changes.csv to those within the files if existent * Fix: remove debug output and fix typo * Make map handling consistent * Improve: refactor time extraction * fix: some syntax fixes * Small nits * Fix: Check changes before stopping the scan of already tested advisories * Revert "Fix: Check changes before stopping the scan of already tested advisories - bad way to solve the problem, can cause problems" This reverts commit d38dc285cc8e664dc97f81418b2b52174e83e68b. * fix: delay checking of changes dates so it is not skipped most of the time * Fix time comparison --------- Co-authored-by: koplas Co-authored-by: Sascha L. Teichmann --- cmd/csaf_checker/processor.go | 85 ++++++++++++++++++++++++++--------- 1 file changed, 65 insertions(+), 20 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 397c88e..ae79133 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -53,6 +53,8 @@ type processor struct { pmd any keys *crypto.KeyRing labelChecker labelChecker + timesChanges map[string]time.Time + timesAdv map[string]time.Time invalidAdvisories topicMessages badFilenames topicMessages @@ -188,6 +190,9 @@ func newProcessor(cfg *config) (*processor, error) { advisories: map[csaf.TLPLabel]util.Set[string]{}, whiteAdvisories: map[identifier]bool{}, }, + timesAdv: map[string]time.Time{}, + timesChanges: map[string]time.Time{}, + noneTLS: util.Set[string]{}, }, nil } @@ -202,14 +207,14 @@ func (p *processor) close() { // reset clears the fields values of the given processor. func (p *processor) reset() { p.redirects = nil - p.noneTLS = nil - for k := range p.alreadyChecked { - delete(p.alreadyChecked, k) - } p.pmdURL = "" p.pmd256 = nil p.pmd = nil p.keys = nil + clear(p.alreadyChecked) + clear(p.noneTLS) + clear(p.timesAdv) + clear(p.timesChanges) p.invalidAdvisories.reset() p.badFilenames.reset() @@ -371,9 +376,6 @@ func (p *processor) checkDomain(domain string) error { // checkTLS parses the given URL to check its schema, as a result it sets // the value of "noneTLS" field if it is not HTTPS. func (p *processor) checkTLS(u string) { - if p.noneTLS == nil { - p.noneTLS = util.Set[string]{} - } if x, err := url.Parse(u); err == nil && x.Scheme != "https" { p.noneTLS.Add(u) } @@ -617,6 +619,8 @@ func makeAbsolute(base *url.URL) func(*url.URL) *url.URL { var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`) +// integrity checks several csaf.AdvisoryFiles for formal +// mistakes, from conforming filenames to invalid advisories. func (p *processor) integrity( files []csaf.AdvisoryFile, base string, @@ -732,19 +736,19 @@ func (p *processor) integrity( // Check if file is in the right folder. 
p.badFolders.use() - if date, err := p.expr.Eval( - `$.document.tracking.initial_release_date`, doc); err != nil { - p.badFolders.error( - "Extracting 'initial_release_date' from %s failed: %v", u, err) - } else if text, ok := date.(string); !ok { - p.badFolders.error("'initial_release_date' is not a string in %s", u) - } else if d, err := time.Parse(time.RFC3339, text); err != nil { - p.badFolders.error( - "Parsing 'initial_release_date' as RFC3339 failed in %s: %v", u, err) - } else if folderYear == nil { + switch date, fault := p.extractTime(doc, `initial_release_date`, u); { + case fault != "": + p.badFolders.error(fault) + case folderYear == nil: p.badFolders.error("No year folder found in %s", u) - } else if d.UTC().Year() != *folderYear { - p.badFolders.error("%s should be in folder %d", u, d.UTC().Year()) + case date.UTC().Year() != *folderYear: + p.badFolders.error("%s should be in folder %d", u, date.UTC().Year()) + } + current, fault := p.extractTime(doc, `current_release_date`, u) + if fault != "" { + p.badChanges.error(fault) + } else { + p.timesAdv[f.URL()] = current } // Check hashes @@ -861,9 +865,48 @@ func (p *processor) integrity( } } + // If we tested an existing changes.csv + if len(p.timesAdv) > 0 && p.badChanges.used() { + // Iterate over all files again + for _, f := range files { + // If there was no previous error when extracting times from advisories and we have a valid time + if timeAdv, ok := p.timesAdv[f.URL()]; ok { + // If there was no previous error when extracting times from changes and the file was listed in changes.csv + if timeCha, ok := p.timesChanges[f.URL()]; ok { + // check if the time matches + if !timeAdv.Equal(timeCha) { + // if not, give an error and remove the pair so it isn't reported multiple times should integrity be called again + p.badChanges.error("Current release date in changes.csv and %s is not identical.", f.URL()) + delete(p.timesAdv, f.URL()) + delete(p.timesChanges, f.URL()) + } + } + } + } + } + return nil } +// extractTime extracts a time.Time value from a json document and returns it and an empty string or zero time alongside +// a string representing the error message that prevented obtaining the proper time value. +func (p *processor) extractTime(doc any, value string, u any) (time.Time, string) { + filter := "$.document.tracking." + value + date, err := p.expr.Eval(filter, doc) + if err != nil { + return time.Time{}, fmt.Sprintf("Extracting '%s' from %s failed: %v", value, u, err) + } + text, ok := date.(string) + if !ok { + return time.Time{}, fmt.Sprintf("'%s' is not a string in %s", value, u) + } + d, err := time.Parse(time.RFC3339, text) + if err != nil { + return time.Time{}, fmt.Sprintf("Parsing '%s' as RFC3339 failed in %s: %v", value, u, err) + } + return d, "" +} + // checkIndex fetches the "index.txt" and calls "checkTLS" method for HTTPS checks. // It extracts the file names from the file and passes them to "integrity" function. // It returns error if fetching/reading the file(s) fails, otherwise nil. 
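Note: both sides of the new check are parsed as RFC3339 and compared with time.Time.Equal, so a release date written with different zone offsets in the advisory and in changes.csv (see the checkChanges hunk below) still counts as identical, while a plain `==` comparison would not. A minimal sketch with made-up timestamps, not taken from the patch:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Hypothetical values: the advisory's current_release_date and the
	// matching changes.csv entry, same instant, different offsets.
	fromAdvisory, _ := time.Parse(time.RFC3339, "2024-01-02T15:04:05+01:00")
	fromChanges, _ := time.Parse(time.RFC3339, "2024-01-02T14:04:05Z")

	fmt.Println(fromAdvisory.Equal(fromChanges)) // true: same instant in time
	fmt.Println(fromAdvisory == fromChanges)     // false: different locations
}
```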
@@ -991,8 +1034,10 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = append(times, t), + times, files = + append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) + p.timesChanges[path] = t } return times, files, nil }() From 17f6a3ac7eb7fac39825fb1ae8c25398d288fedc Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 14 Mar 2025 10:26:19 +0100 Subject: [PATCH 165/235] Fix inconsistent format --- docs/scripts/setupProviderForITest.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/scripts/setupProviderForITest.sh b/docs/scripts/setupProviderForITest.sh index 2b6e6d1..ae6c6fc 100755 --- a/docs/scripts/setupProviderForITest.sh +++ b/docs/scripts/setupProviderForITest.sh @@ -56,14 +56,14 @@ location /cgi-bin/ { sudo sed -i "/^server {/a\ include fcgiwrap.conf;" $NGINX_CONFIG_PATH echo " - # For atomic directory switches - disable_symlinks off; + # For atomic directory switches + disable_symlinks off; - # directory listings - autoindex on; + # directory listings + autoindex on; - # allow others web applications to get the static information - add_header Access-Control-Allow-Origin "*"; + # allow others web applications to get the static information + add_header Access-Control-Allow-Origin "*"; " > locationConfig.txt sudo sed -i "/^\s*location \/ {/r locationConfig.txt" $NGINX_CONFIG_PATH # Insert config inside location{} ./DNSConfigForItest.sh From a7821265ca4dfc65ec3966d970047c322900e188 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 17 Mar 2025 08:57:05 +0100 Subject: [PATCH 166/235] Move advisory downloading to download context method --- cmd/csaf_downloader/downloader.go | 616 ++++++++++++++++-------------- 1 file changed, 319 insertions(+), 297 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 3270a88..5af7f5e 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -417,6 +417,320 @@ func (d *downloader) logValidationIssues(url string, errors []string, err error) } } +// downloadContext stores the common context of a downloader. +type downloadContext struct { + d *downloader + client util.Client + data bytes.Buffer + lastDir string + initialReleaseDate time.Time + dateExtract func(any) error + lower string + stats stats + expr *util.PathEval +} + +func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext { + dc := &downloadContext{ + client: d.httpClient(), + lower: strings.ToLower(string(label)), + expr: util.NewPathEval(), + } + dc.dateExtract = util.TimeMatcher(&dc.initialReleaseDate, time.RFC3339) + return dc +} + +func (dc *downloadContext) downloadAdvisory( + file csaf.AdvisoryFile, + errorCh chan<- error, +) error { + u, err := url.Parse(file.URL()) + if err != nil { + dc.stats.downloadFailed++ + slog.Warn("Ignoring invalid URL", + "url", file.URL(), + "error", err) + return nil + } + + if dc.d.cfg.ignoreURL(file.URL()) { + slog.Debug("Ignoring URL", "url", file.URL()) + return nil + } + + // Ignore not conforming filenames. 
+ filename := filepath.Base(u.Path) + if !util.ConformingFileName(filename) { + dc.stats.filenameFailed++ + slog.Warn("Ignoring none conforming filename", + "filename", filename) + return nil + } + + resp, err := dc.client.Get(file.URL()) + if err != nil { + dc.stats.downloadFailed++ + slog.Warn("Cannot GET", + "url", file.URL(), + "error", err) + return nil + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + dc.stats.downloadFailed++ + slog.Warn("Cannot load", + "url", file.URL(), + "status", resp.Status, + "status_code", resp.StatusCode) + return nil + } + + // Warn if we do not get JSON. + if ct := resp.Header.Get("Content-Type"); ct != "application/json" { + slog.Warn("Content type is not 'application/json'", + "url", file.URL(), + "content_type", ct) + } + + var ( + writers []io.Writer + s256, s512 hash.Hash + s256Data, s512Data []byte + remoteSHA256, remoteSHA512 []byte + signData []byte + ) + + hashToFetch := []hashFetchInfo{} + if file.SHA512URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA512URL(), + warn: true, + hashType: algSha512, + preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha512)), + }) + } else { + slog.Info("SHA512 not present") + } + if file.SHA256URL() != "" { + hashToFetch = append(hashToFetch, hashFetchInfo{ + url: file.SHA256URL(), + warn: true, + hashType: algSha256, + preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha256)), + }) + } else { + slog.Info("SHA256 not present") + } + if file.IsDirectory() { + for i := range hashToFetch { + hashToFetch[i].warn = false + } + } + + remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(dc.client, hashToFetch) + if remoteSHA512 != nil { + s512 = sha512.New() + writers = append(writers, s512) + } + if remoteSHA256 != nil { + s256 = sha256.New() + writers = append(writers, s256) + } + + // Remember the data as we need to store it to file later. + dc.data.Reset() + writers = append(writers, &dc.data) + + // Download the advisory and hash it. + hasher := io.MultiWriter(writers...) + + var doc any + + tee := io.TeeReader(resp.Body, hasher) + + if err := json.NewDecoder(tee).Decode(&doc); err != nil { + dc.stats.downloadFailed++ + slog.Warn("Downloading failed", + "url", file.URL(), + "error", err) + return nil + } + + // Compare the checksums. + s256Check := func() error { + if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) { + dc.stats.sha256Failed++ + return fmt.Errorf("SHA256 checksum of %s does not match", file.URL()) + } + return nil + } + + s512Check := func() error { + if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) { + dc.stats.sha512Failed++ + return fmt.Errorf("SHA512 checksum of %s does not match", file.URL()) + } + return nil + } + + // Validate OpenPGP signature. + keysCheck := func() error { + // Only check signature if we have loaded keys. + if dc.d.keys == nil { + return nil + } + var sign *crypto.PGPSignature + sign, signData, err = loadSignature(dc.client, file.SignURL()) + if err != nil { + slog.Warn("Downloading signature failed", + "url", file.SignURL(), + "error", err) + } + if sign != nil { + if err := dc.d.checkSignature(dc.data.Bytes(), sign); err != nil { + if !dc.d.cfg.IgnoreSignatureCheck { + dc.stats.signatureFailed++ + return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err) + } + } + } + return nil + } + + // Validate against CSAF schema. 
+ schemaCheck := func() error { + if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 { + dc.stats.schemaFailed++ + dc.d.logValidationIssues(file.URL(), errors, err) + return fmt.Errorf("schema validation for %q failed", file.URL()) + } + return nil + } + + // Validate if filename is conforming. + filenameCheck := func() error { + if err := util.IDMatchesFilename(dc.expr, doc, filename); err != nil { + dc.stats.filenameFailed++ + return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) + } + return nil + } + + // Validate against remote validator. + remoteValidatorCheck := func() error { + if dc.d.validator == nil { + return nil + } + rvr, err := dc.d.validator.Validate(doc) + if err != nil { + errorCh <- fmt.Errorf( + "calling remote validator on %q failed: %w", + file.URL(), err) + return nil + } + if !rvr.Valid { + dc.stats.remoteFailed++ + return fmt.Errorf("remote validation of %q failed", file.URL()) + } + return nil + } + + // Run all the validations. + valStatus := notValidatedValidationStatus + for _, check := range []func() error{ + s256Check, + s512Check, + keysCheck, + schemaCheck, + filenameCheck, + remoteValidatorCheck, + } { + if err := check(); err != nil { + slog.Error("Validation check failed", "error", err) + valStatus.update(invalidValidationStatus) + if dc.d.cfg.ValidationMode == validationStrict { + return nil + } + } + } + valStatus.update(validValidationStatus) + + // Send to forwarder + if dc.d.forwarder != nil { + dc.d.forwarder.forward( + filename, dc.data.String(), + valStatus, + string(s256Data), + string(s512Data)) + } + + if dc.d.cfg.NoStore { + // Do not write locally. + if valStatus == validValidationStatus { + dc.stats.succeeded++ + } + return nil + } + + if err := dc.expr.Extract( + `$.document.tracking.initial_release_date`, dc.dateExtract, false, doc, + ); err != nil { + slog.Warn("Cannot extract initial_release_date from advisory", + "url", file.URL()) + dc.initialReleaseDate = time.Now() + } + dc.initialReleaseDate = dc.initialReleaseDate.UTC() + + // Advisories that failed validation are stored in a special folder. + var newDir string + if valStatus != validValidationStatus { + newDir = path.Join(dc.d.cfg.Directory, failedValidationDir) + } else { + newDir = dc.d.cfg.Directory + } + + // Do we have a configured destination folder? + if dc.d.cfg.Folder != "" { + newDir = path.Join(newDir, dc.d.cfg.Folder) + } else { + newDir = path.Join(newDir, dc.lower, strconv.Itoa(dc.initialReleaseDate.Year())) + } + + if newDir != dc.lastDir { + if err := dc.d.mkdirAll(newDir, 0755); err != nil { + errorCh <- err + return nil + } + dc.lastDir = newDir + } + + // Write advisory to file + path := filepath.Join(dc.lastDir, filename) + + // Write data to disk. 
+ for _, x := range []struct { + p string + d []byte + }{ + {path, dc.data.Bytes()}, + {path + ".sha256", s256Data}, + {path + ".sha512", s512Data}, + {path + ".asc", signData}, + } { + if x.d != nil { + if err := os.WriteFile(x.p, x.d, 0644); err != nil { + errorCh <- err + return nil + } + } + } + + dc.stats.succeeded++ + slog.Info("Written advisory", "path", path) + return nil +} + func (d *downloader) downloadWorker( ctx context.Context, wg *sync.WaitGroup, @@ -426,21 +740,11 @@ func (d *downloader) downloadWorker( ) { defer wg.Done() - var ( - client = d.httpClient() - data bytes.Buffer - lastDir string - initialReleaseDate time.Time - dateExtract = util.TimeMatcher(&initialReleaseDate, time.RFC3339) - lower = strings.ToLower(string(label)) - stats = stats{} - expr = util.NewPathEval() - ) + dc := newDownloadContext(d, label) // Add collected stats back to total. - defer d.addStats(&stats) + defer d.addStats(&dc.stats) -nextAdvisory: for { var file csaf.AdvisoryFile var ok bool @@ -452,292 +756,10 @@ nextAdvisory: case <-ctx.Done(): return } - - u, err := url.Parse(file.URL()) - if err != nil { - stats.downloadFailed++ - slog.Warn("Ignoring invalid URL", - "url", file.URL(), - "error", err) - continue + if err := dc.downloadAdvisory(file, errorCh); err != nil { + slog.Error("download terminated", "error", err) + return } - - if d.cfg.ignoreURL(file.URL()) { - slog.Debug("Ignoring URL", "url", file.URL()) - continue - } - - // Ignore not conforming filenames. - filename := filepath.Base(u.Path) - if !util.ConformingFileName(filename) { - stats.filenameFailed++ - slog.Warn("Ignoring none conforming filename", - "filename", filename) - continue - } - - resp, err := client.Get(file.URL()) - if err != nil { - stats.downloadFailed++ - slog.Warn("Cannot GET", - "url", file.URL(), - "error", err) - continue - } - - if resp.StatusCode != http.StatusOK { - stats.downloadFailed++ - slog.Warn("Cannot load", - "url", file.URL(), - "status", resp.Status, - "status_code", resp.StatusCode) - continue - } - - // Warn if we do not get JSON. - if ct := resp.Header.Get("Content-Type"); ct != "application/json" { - slog.Warn("Content type is not 'application/json'", - "url", file.URL(), - "content_type", ct) - } - - var ( - writers []io.Writer - s256, s512 hash.Hash - s256Data, s512Data []byte - remoteSHA256, remoteSHA512 []byte - signData []byte - ) - - hashToFetch := []hashFetchInfo{} - if file.SHA512URL() != "" { - hashToFetch = append(hashToFetch, hashFetchInfo{ - url: file.SHA512URL(), - warn: true, - hashType: algSha512, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha512)), - }) - } else { - slog.Info("SHA512 not present") - } - if file.SHA256URL() != "" { - hashToFetch = append(hashToFetch, hashFetchInfo{ - url: file.SHA256URL(), - warn: true, - hashType: algSha256, - preferred: strings.EqualFold(string(d.cfg.PreferredHash), string(algSha256)), - }) - } else { - slog.Info("SHA256 not present") - } - if file.IsDirectory() { - for i := range hashToFetch { - hashToFetch[i].warn = false - } - } - - remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(client, hashToFetch) - if remoteSHA512 != nil { - s512 = sha512.New() - writers = append(writers, s512) - } - if remoteSHA256 != nil { - s256 = sha256.New() - writers = append(writers, s256) - } - - // Remember the data as we need to store it to file later. - data.Reset() - writers = append(writers, &data) - - // Download the advisory and hash it. - hasher := io.MultiWriter(writers...) 
- - var doc any - - if err := func() error { - defer resp.Body.Close() - tee := io.TeeReader(resp.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) - }(); err != nil { - stats.downloadFailed++ - slog.Warn("Downloading failed", - "url", file.URL(), - "error", err) - continue - } - - // Compare the checksums. - s256Check := func() error { - if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) { - stats.sha256Failed++ - return fmt.Errorf("SHA256 checksum of %s does not match", file.URL()) - } - return nil - } - - s512Check := func() error { - if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) { - stats.sha512Failed++ - return fmt.Errorf("SHA512 checksum of %s does not match", file.URL()) - } - return nil - } - - // Validate OpenPGP signature. - keysCheck := func() error { - // Only check signature if we have loaded keys. - if d.keys == nil { - return nil - } - var sign *crypto.PGPSignature - sign, signData, err = loadSignature(client, file.SignURL()) - if err != nil { - slog.Warn("Downloading signature failed", - "url", file.SignURL(), - "error", err) - } - if sign != nil { - if err := d.checkSignature(data.Bytes(), sign); err != nil { - if !d.cfg.IgnoreSignatureCheck { - stats.signatureFailed++ - return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err) - } - } - } - return nil - } - - // Validate against CSAF schema. - schemaCheck := func() error { - if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 { - stats.schemaFailed++ - d.logValidationIssues(file.URL(), errors, err) - return fmt.Errorf("schema validation for %q failed", file.URL()) - } - return nil - } - - // Validate if filename is conforming. - filenameCheck := func() error { - if err := util.IDMatchesFilename(expr, doc, filename); err != nil { - stats.filenameFailed++ - return fmt.Errorf("filename not conforming %s: %s", file.URL(), err) - } - return nil - } - - // Validate against remote validator. - remoteValidatorCheck := func() error { - if d.validator == nil { - return nil - } - rvr, err := d.validator.Validate(doc) - if err != nil { - errorCh <- fmt.Errorf( - "calling remote validator on %q failed: %w", - file.URL(), err) - return nil - } - if !rvr.Valid { - stats.remoteFailed++ - return fmt.Errorf("remote validation of %q failed", file.URL()) - } - return nil - } - - // Run all the validations. - valStatus := notValidatedValidationStatus - for _, check := range []func() error{ - s256Check, - s512Check, - keysCheck, - schemaCheck, - filenameCheck, - remoteValidatorCheck, - } { - if err := check(); err != nil { - slog.Error("Validation check failed", "error", err) - valStatus.update(invalidValidationStatus) - if d.cfg.ValidationMode == validationStrict { - continue nextAdvisory - } - } - } - valStatus.update(validValidationStatus) - - // Send to forwarder - if d.forwarder != nil { - d.forwarder.forward( - filename, data.String(), - valStatus, - string(s256Data), - string(s512Data)) - } - - if d.cfg.NoStore { - // Do not write locally. - if valStatus == validValidationStatus { - stats.succeeded++ - } - continue - } - - if err := expr.Extract( - `$.document.tracking.initial_release_date`, dateExtract, false, doc, - ); err != nil { - slog.Warn("Cannot extract initial_release_date from advisory", - "url", file.URL()) - initialReleaseDate = time.Now() - } - initialReleaseDate = initialReleaseDate.UTC() - - // Advisories that failed validation are stored in a special folder. 
- var newDir string - if valStatus != validValidationStatus { - newDir = path.Join(d.cfg.Directory, failedValidationDir) - } else { - newDir = d.cfg.Directory - } - - // Do we have a configured destination folder? - if d.cfg.Folder != "" { - newDir = path.Join(newDir, d.cfg.Folder) - } else { - newDir = path.Join(newDir, lower, strconv.Itoa(initialReleaseDate.Year())) - } - - if newDir != lastDir { - if err := d.mkdirAll(newDir, 0755); err != nil { - errorCh <- err - continue - } - lastDir = newDir - } - - // Write advisory to file - path := filepath.Join(lastDir, filename) - - // Write data to disk. - for _, x := range []struct { - p string - d []byte - }{ - {path, data.Bytes()}, - {path + ".sha256", s256Data}, - {path + ".sha512", s512Data}, - {path + ".asc", signData}, - } { - if x.d != nil { - if err := os.WriteFile(x.p, x.d, 0644); err != nil { - errorCh <- err - continue nextAdvisory - } - } - } - - stats.succeeded++ - slog.Info("Written advisory", "path", path) } } From 5437d8127a8245ea5da2d7162c63c844e16156e9 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Mon, 17 Mar 2025 09:10:03 +0100 Subject: [PATCH 167/235] Store downloader in context --- cmd/csaf_downloader/downloader.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 5af7f5e..f0778ee 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -432,6 +432,7 @@ type downloadContext struct { func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext { dc := &downloadContext{ + d: d, client: d.httpClient(), lower: strings.ToLower(string(label)), expr: util.NewPathEval(), From 5709b14650682d1d9e5614ba586d3dc96a0aa27a Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 19 Mar 2025 09:04:19 +0100 Subject: [PATCH 168/235] Extend structured logging usage in aggregator (#622) * Extend structured logging usage in aggregator * Use structured logging in advisories processor * Remove unnecessary inner function * Format * Feat: Add verbose flag to example aggregator toml (in comment) --------- Co-authored-by: JanHoefelmeyer --- cmd/csaf_aggregator/config.go | 15 ++++++++---- cmd/csaf_aggregator/mirror.go | 7 +++--- csaf/advisories.go | 43 +++++++++++++++++------------------ docs/examples/aggregator.toml | 1 + 4 files changed, 36 insertions(+), 30 deletions(-) diff --git a/cmd/csaf_aggregator/config.go b/cmd/csaf_aggregator/config.go index 3c2c46b..55a7193 100644 --- a/cmd/csaf_aggregator/config.go +++ b/cmd/csaf_aggregator/config.go @@ -264,8 +264,14 @@ func (c *config) privateOpenPGPKey() (*crypto.Key, error) { return c.key, c.keyErr } -func (c *config) httpClient(p *provider) util.Client { +// httpLog does structured logging in a [util.LoggingClient]. 
+func httpLog(method, url string) { + slog.Debug("http", + "method", method, + "url", url) +} +func (c *config) httpClient(p *provider) util.Client { hClient := http.Client{} var tlsConfig tls.Config @@ -310,7 +316,10 @@ func (c *config) httpClient(p *provider) util.Client { } if c.Verbose { - client = &util.LoggingClient{Client: client} + client = &util.LoggingClient{ + Client: client, + Log: httpLog, + } } if p.Rate == nil && c.Rate == nil { @@ -331,7 +340,6 @@ func (c *config) httpClient(p *provider) util.Client { } func (c *config) checkProviders() error { - if !c.AllowSingleProvider && len(c.Providers) < 2 { return errors.New("need at least two providers") } @@ -471,7 +479,6 @@ func (c *config) prepareCertificates() error { // prepare prepares internal state of a loaded configuration. func (c *config) prepare() error { - if len(c.Providers) == 0 { return errors.New("no providers given in configuration") } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index c90ef68..e7c5154 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -462,8 +462,9 @@ func (w *worker) extractCategories(label string, advisory any) error { expr := cat[len(exprPrefix):] // Compile first to check that the expression is okay. if _, err := w.expr.Compile(expr); err != nil { - fmt.Printf("Compiling category expression %q failed: %v\n", - expr, err) + slog.Error("Compiling category expression failed", + "expr", expr, + "err", err) continue } // Ignore errors here as they result from not matching. @@ -588,12 +589,10 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) if err := os.MkdirAll(yearDir, 0755); err != nil { return err } - //log.Printf("created %s\n", yearDir) yearDirs[year] = yearDir } fname := filepath.Join(yearDir, filename) - //log.Printf("write: %s\n", fname) data := content.Bytes() if err := writeFileHashes( fname, filename, diff --git a/csaf/advisories.go b/csaf/advisories.go index df23935..ef3fea8 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -9,10 +9,10 @@ package csaf import ( + "context" "encoding/csv" "fmt" "io" - "log" "log/slog" "net/http" "net/url" @@ -91,7 +91,7 @@ func (daf DirectoryAdvisoryFile) LogValue() slog.Value { // advisory file names from a given provider metadata. type AdvisoryFileProcessor struct { AgeAccept func(time.Time) bool - Log func(format string, args ...any) + Log func(loglevel slog.Level, format string, args ...any) client util.Client expr *util.PathEval doc any @@ -131,8 +131,8 @@ func (afp *AdvisoryFileProcessor) Process( ) error { lg := afp.Log if lg == nil { - lg = func(format string, args ...any) { - log.Printf("AdvisoryFileProcessor.Process: "+format, args...) + lg = func(loglevel slog.Level, format string, args ...any) { + slog.Log(context.Background(), loglevel, "AdvisoryFileProcessor.Process: "+format, args...) 
} } @@ -140,7 +140,7 @@ func (afp *AdvisoryFileProcessor) Process( rolie, err := afp.expr.Eval( "$.distributions[*].rolie.feeds", afp.doc) if err != nil { - lg("rolie check failed: %v\n", err) + lg(slog.LevelError, "rolie check failed", "err", err) return err } @@ -152,7 +152,7 @@ func (afp *AdvisoryFileProcessor) Process( if err := util.ReMarshalJSON(&feeds, rolie); err != nil { return err } - lg("Found %d ROLIE feed(s).\n", len(feeds)) + lg(slog.LevelInfo, "Found ROLIE feed(s)", "length", len(feeds)) for _, feed := range feeds { if err := afp.processROLIE(feed, fn); err != nil { @@ -168,12 +168,12 @@ func (afp *AdvisoryFileProcessor) Process( var dirURLs []string if err != nil { - lg("extracting directory URLs failed: %v\n", err) + lg(slog.LevelError, "extracting directory URLs failed", "err", err) } else { var ok bool dirURLs, ok = util.AsStrings(directoryURLs) if !ok { - lg("directory_urls are not strings.\n") + lg(slog.LevelError, "directory_urls are not strings") } } @@ -209,9 +209,8 @@ func (afp *AdvisoryFileProcessor) Process( // prefixed by baseURL/. func (afp *AdvisoryFileProcessor) loadChanges( baseURL string, - lg func(string, ...any), + lg func(slog.Level, string, ...any), ) ([]AdvisoryFile, error) { - base, err := url.Parse(baseURL) if err != nil { return nil, err @@ -244,12 +243,12 @@ func (afp *AdvisoryFileProcessor) loadChanges( return nil, err } if len(r) < 2 { - lg("%q has not enough columns in line %d", line) + lg(slog.LevelError, "Not enough columns", "line", line) continue } t, err := time.Parse(time.RFC3339, r[timeColumn]) if err != nil { - lg("%q has an invalid time stamp in line %d: %v", changesURL, line, err) + lg(slog.LevelError, "Invalid time stamp in line", "url", changesURL, "line", line, "err", err) continue } // Apply date range filtering. @@ -258,7 +257,7 @@ func (afp *AdvisoryFileProcessor) loadChanges( } path := r[pathColumn] if _, err := url.Parse(path); err != nil { - lg("%q contains an invalid URL %q in line %d", changesURL, path, line) + lg(slog.LevelError, "Contains an invalid URL", "url", changesURL, "path", path, "line", line) continue } @@ -279,31 +278,31 @@ func (afp *AdvisoryFileProcessor) processROLIE( } up, err := url.Parse(string(*feed.URL)) if err != nil { - log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err) + slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } feedURL := afp.base.ResolveReference(up) - log.Printf("Feed URL: %s\n", feedURL) + slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) if err != nil { - log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err) + slog.Error("Invalid feed base URL", "url", fb, "err", err) continue } feedBaseURL, err := url.Parse(fb) if err != nil { - log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err) + slog.Error("Cannot parse feed base URL", "url", fb, "err", err) continue } res, err := afp.client.Get(feedURL.String()) if err != nil { - log.Printf("error: Cannot get feed '%s'\n", err) + slog.Error("Cannot get feed", "err", err) continue } if res.StatusCode != http.StatusOK { - log.Printf("error: Fetching %s failed. 
Status code %d (%s)", - feedURL, res.StatusCode, res.Status) + slog.Error("Fetching failed", + "url", feedURL, "status_code", res.StatusCode, "status", res.Status) continue } rfeed, err := func() (*ROLIEFeed, error) { @@ -311,7 +310,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( return LoadROLIEFeed(res.Body) }() if err != nil { - log.Printf("Loading ROLIE feed failed: %v.", err) + slog.Error("Loading ROLIE feed failed", "err", err) continue } @@ -323,7 +322,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( } p, err := url.Parse(u) if err != nil { - log.Printf("error: Invalid URL '%s': %v", u, err) + slog.Error("Invalid URL", "url", u, "err", err) return "" } return feedBaseURL.ResolveReference(p).String() diff --git a/docs/examples/aggregator.toml b/docs/examples/aggregator.toml index 2161079..8d4ee80 100644 --- a/docs/examples/aggregator.toml +++ b/docs/examples/aggregator.toml @@ -5,6 +5,7 @@ web = "/var/csaf_aggregator/html" domain = "https://localhost:9443" rate = 10.0 insecure = true +#verbose = false #openpgp_private_key = #openpgp_public_key = #interim_years = From 0848143a0bbcd83cecf626be7d8379759121de53 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 19 Mar 2025 09:39:07 +0100 Subject: [PATCH 169/235] Update lint (#626) * Update linter * Format * Fix lint --- .github/workflows/go.yml | 6 +++--- cmd/csaf_aggregator/client_test.go | 4 ++-- cmd/csaf_downloader/downloader_test.go | 6 ++---- cmd/csaf_downloader/forwarder.go | 6 +++--- cmd/csaf_provider/main.go | 2 +- internal/options/options_test.go | 9 ++++----- util/file_test.go | 2 +- 7 files changed, 16 insertions(+), 19 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 95ee8c7..b86309f 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -17,7 +17,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 'stable' + go-version: "stable" - name: Build run: go build -v ./cmd/... @@ -31,10 +31,10 @@ jobs: gofmt-flags: "-l -d" - name: golint - uses: Jerome1337/golint-action@v1.0.2 + uses: Jerome1337/golint-action@v1.0.3 - name: Revive Action - uses: morphy2k/revive-action@v2.5.1 + uses: morphy2k/revive-action@v2.7.4 - name: Tests run: go test -v ./... 
diff --git a/cmd/csaf_aggregator/client_test.go b/cmd/csaf_aggregator/client_test.go index fc5b095..3617ce6 100644 --- a/cmd/csaf_aggregator/client_test.go +++ b/cmd/csaf_aggregator/client_test.go @@ -49,10 +49,10 @@ func Test_downloadJSON(t *testing.T) { test := testToRun t.Run(test.name, func(tt *testing.T) { tt.Parallel() - found := func(r io.Reader) error { + found := func(_ io.Reader) error { return nil } - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Add("Content-Type", test.contentType) w.WriteHeader(test.statusCode) })) diff --git a/cmd/csaf_downloader/downloader_test.go b/cmd/csaf_downloader/downloader_test.go index d7eaae3..1485ec9 100644 --- a/cmd/csaf_downloader/downloader_test.go +++ b/cmd/csaf_downloader/downloader_test.go @@ -24,12 +24,10 @@ import ( func checkIfFileExists(path string, t *testing.T) bool { if _, err := os.Stat(path); err == nil { return true - } else if errors.Is(err, os.ErrNotExist) { - return false - } else { + } else if !errors.Is(err, os.ErrNotExist) { t.Fatalf("Failed to check if file exists: %v", err) - return false } + return false } func TestShaMarking(t *testing.T) { diff --git a/cmd/csaf_downloader/forwarder.go b/cmd/csaf_downloader/forwarder.go index 1598283..ac2c336 100644 --- a/cmd/csaf_downloader/forwarder.go +++ b/cmd/csaf_downloader/forwarder.go @@ -224,12 +224,12 @@ func (f *forwarder) storeFailed(filename, doc, sha256, sha512 string) { // limitedString reads max bytes from reader and returns it as a string. // Longer strings are indicated by "..." as a suffix. -func limitedString(r io.Reader, max int) (string, error) { +func limitedString(r io.Reader, maxLength int) (string, error) { var msg strings.Builder - if _, err := io.Copy(&msg, io.LimitReader(r, int64(max))); err != nil { + if _, err := io.Copy(&msg, io.LimitReader(r, int64(maxLength))); err != nil { return "", err } - if msg.Len() >= max { + if msg.Len() >= maxLength { msg.WriteString("...") } return msg.String(), nil diff --git a/cmd/csaf_provider/main.go b/cmd/csaf_provider/main.go index 6c858c9..3faebfe 100644 --- a/cmd/csaf_provider/main.go +++ b/cmd/csaf_provider/main.go @@ -48,7 +48,7 @@ func main() { cfg, err := loadConfig() if err != nil { - cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) { http.Error(rw, "Something went wrong. Check server logs for more details", http.StatusInternalServerError) })) diff --git a/internal/options/options_test.go b/internal/options/options_test.go index 9aab23b..2768e37 100644 --- a/internal/options/options_test.go +++ b/internal/options/options_test.go @@ -37,10 +37,10 @@ func TestParse(t *testing.T) { }, Usage: "[OPTIONS] domain...", HasVersion: func(cfg *config) bool { return cfg.Version }, - SetDefaults: func(cfg *config) { + SetDefaults: func(_ *config) { }, // Re-establish default values if not set. 
- EnsureDefaults: func(cfg *config) { + EnsureDefaults: func(_ *config) { }, } @@ -157,7 +157,6 @@ func TestErrorCheck(t *testing.T) { return } t.Fatalf("process ran with err %v, want exit status 1", err) - } // TestSecondPassCommandlineParsing checks if the second pass @@ -168,7 +167,7 @@ func TestSecondPassCommandlineParsing(t *testing.T) { os.Args = []string{"cmd"} p := Parser[config]{ - ConfigLocation: func(cfg *config) string { + ConfigLocation: func(_ *config) string { // This is a bit stupid. os.Args = []string{"cmd", "--invalid"} return "data/empty.toml" @@ -188,7 +187,7 @@ func TestSecondPassCommandlineHelp(t *testing.T) { os.Args = []string{"cmd"} p := Parser[config]{ - ConfigLocation: func(cfg *config) string { + ConfigLocation: func(_ *config) string { // This is a bit stupid. os.Args = []string{"cmd", "--help"} return "data/empty.toml" diff --git a/util/file_test.go b/util/file_test.go index 28c5196..ab2a208 100644 --- a/util/file_test.go +++ b/util/file_test.go @@ -155,7 +155,7 @@ func TestMakeUniqFile(t *testing.T) { func Test_mkUniq(t *testing.T) { dir := t.TempDir() - name, err := mkUniq(dir+"/", func(name string) error { + name, err := mkUniq(dir+"/", func(_ string) error { return nil }) if err != nil { From 2c5ef1fd5f47a8c9ad34526a5eef64a2c8b28f9f Mon Sep 17 00:00:00 2001 From: koplas Date: Mon, 24 Mar 2025 13:32:43 +0100 Subject: [PATCH 170/235] Avoid memory leak Move `resp.Body.Close()` before check of status code. Reported by @mgoetzegb here: https://github.com/gocsaf/csaf/pull/625#issuecomment-2744067770 --- cmd/csaf_downloader/downloader.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index f0778ee..bcef357 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -781,11 +781,11 @@ func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, if err != nil { return nil, nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, nil, fmt.Errorf( "fetching signature from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode) } - defer resp.Body.Close() data, err := io.ReadAll(resp.Body) if err != nil { return nil, nil, err @@ -846,11 +846,11 @@ func loadHash(client util.Client, p string) ([]byte, []byte, error) { if err != nil { return nil, nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { return nil, nil, fmt.Errorf( "fetching hash from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode) } - defer resp.Body.Close() var data bytes.Buffer tee := io.TeeReader(resp.Body, &data) hash, err := util.HashFromReader(tee) From 2f599ab0175d0d89748f4d539afdc51024332b97 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Wed, 2 Apr 2025 17:05:29 +0200 Subject: [PATCH 171/235] Fix aggregator URL handling (#631) * Fix aggregator URL handling Parts of the URL were not path escaped. This results in a wrong URL; if the provider name contains characters that need to be escaped. 
* Simplify JoinPath usage --- cmd/csaf_aggregator/indices.go | 68 +++++++++++++++++++++----------- cmd/csaf_aggregator/mirror.go | 32 +++++++++------ cmd/csaf_aggregator/processor.go | 13 ++++++ 3 files changed, 78 insertions(+), 35 deletions(-) diff --git a/cmd/csaf_aggregator/indices.go b/cmd/csaf_aggregator/indices.go index 17c8d3a..976d9a3 100644 --- a/cmd/csaf_aggregator/indices.go +++ b/cmd/csaf_aggregator/indices.go @@ -183,19 +183,26 @@ func (w *worker) writeROLIENoSummaries(label string) error { fname := "csaf-feed-tlp-" + labelFolder + ".json" - feedURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + labelFolder + "/" + fname + feedURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + feedURL = feedURL.JoinPath(labelFolder, fname) links := []csaf.Link{{ Rel: "self", - HRef: feedURL, + HRef: feedURL.String(), }} if w.provider.serviceDocument(w.processor.cfg) { + serviceURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + serviceURL = serviceURL.JoinPath("service.json") links = append(links, csaf.Link{ - Rel: "service", - HRef: w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/service.json", + Rel: "service", + HRef: serviceURL.String(), }) } @@ -223,8 +230,11 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { fname := "csaf-feed-tlp-" + labelFolder + ".json" - feedURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + labelFolder + "/" + fname + feedURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + feedURL = feedURL.JoinPath(labelFolder, fname) entries := make([]*csaf.Entry, len(summaries)) @@ -236,10 +246,13 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { for i := range summaries { s := &summaries[i] - csafURL := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + label + "/" + - strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + - s.filename + csafURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + csafURLString := csafURL.JoinPath(label, + strconv.Itoa(s.summary.InitialReleaseDate.Year()), + s.filename).String() entries[i] = &csaf.Entry{ ID: s.summary.ID, @@ -247,15 +260,15 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { Published: csaf.TimeStamp(s.summary.InitialReleaseDate), Updated: csaf.TimeStamp(s.summary.CurrentReleaseDate), Link: []csaf.Link{ - {Rel: "self", HRef: csafURL}, - {Rel: "hash", HRef: csafURL + ".sha256"}, - {Rel: "hash", HRef: csafURL + ".sha512"}, - {Rel: "signature", HRef: csafURL + ".asc"}, + {Rel: "self", HRef: csafURLString}, + {Rel: "hash", HRef: csafURLString + ".sha256"}, + {Rel: "hash", HRef: csafURLString + ".sha512"}, + {Rel: "signature", HRef: csafURLString + ".asc"}, }, Format: format, Content: csaf.Content{ Type: "application/json", - Src: csafURL, + Src: csafURLString, }, } if s.summary.Summary != "" { @@ -267,14 +280,18 @@ func (w *worker) writeROLIE(label string, summaries []summary) error { links := []csaf.Link{{ Rel: "self", - HRef: feedURL, + HRef: feedURL.String(), }} if w.provider.serviceDocument(w.processor.cfg) { + serviceURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + serviceURL = serviceURL.JoinPath("service.json") links = append(links, csaf.Link{ - Rel: "service", - HRef: w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/service.json", + Rel: "service", + HRef: serviceURL.String(), 
}) } @@ -344,12 +361,15 @@ func (w *worker) writeService() error { for _, ts := range labels { feedName := "csaf-feed-tlp-" + ts + ".json" - href := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + - w.provider.Name + "/" + ts + "/" + feedName + hrefURL, err := w.getProviderBaseURL() + if err != nil { + return err + } + hrefURL = hrefURL.JoinPath(ts, feedName) collection := csaf.ROLIEServiceWorkspaceCollection{ Title: "CSAF feed (TLP:" + strings.ToUpper(ts) + ")", - HRef: href, + HRef: hrefURL.String(), Categories: categories, } collections = append(collections, collection) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index e7c5154..1ef5881 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -103,9 +103,13 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { } // Add us as a mirror. + mirror, err := w.getProviderBaseURL() + if err != nil { + return nil, err + } mirrorURL := csaf.ProviderURL( - fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/provider-metadata.json", - w.processor.cfg.Domain, w.provider.Name)) + mirror.JoinPath("provider-metadata.json").String(), + ) acp.Mirrors = []csaf.ProviderURL{ mirrorURL, @@ -128,8 +132,12 @@ func (w *worker) writeProviderMetadata() error { fname := filepath.Join(w.dir, "provider-metadata.json") + prefixURL, err := w.getProviderBaseURL() + if err != nil { + return err + } pm := csaf.NewProviderMetadataPrefix( - w.processor.cfg.Domain+"/.well-known/csaf-aggregator/"+w.provider.Name, + prefixURL.String(), w.labelsFromSummaries()) // Fill in directory URLs if needed. @@ -139,9 +147,8 @@ func (w *worker) writeProviderMetadata() error { labels = append(labels, label) } sort.Strings(labels) - prefix := w.processor.cfg.Domain + "/.well-known/csaf-aggregator/" + w.provider.Name + "/" for _, label := range labels { - pm.AddDirectoryDistribution(prefix + label) + pm.AddDirectoryDistribution(prefixURL.JoinPath(label).String()) } } @@ -188,9 +195,12 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { return err } + keyURL, err := w.getProviderBaseURL() + if err != nil { + return err + } localKeyURL := func(fingerprint string) string { - return fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/openpgp/%s.asc", - w.processor.cfg.Domain, w.provider.Name, fingerprint) + return keyURL.JoinPath("openpgp", (fingerprint + ".asc")).String() } for i := range pm.PGPKeys { @@ -240,8 +250,8 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error { } // replace the URL - url := localKeyURL(fingerprint) - pgpKey.URL = &url + u := localKeyURL(fingerprint) + pgpKey.URL = &u } // If we have public key configured copy it into the new folder @@ -308,7 +318,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error var ( lastUpdated = csaf.TimeStamp(lastUpdatedT) role = csaf.MetadataRole(roleS) - url = csaf.ProviderURL(urlS) + providerURL = csaf.ProviderURL(urlS) ) return &csaf.AggregatorCSAFProvider{ @@ -316,7 +326,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error LastUpdated: &lastUpdated, Publisher: &pub, Role: &role, - URL: &url, + URL: &providerURL, }, }, nil } diff --git a/cmd/csaf_aggregator/processor.go b/cmd/csaf_aggregator/processor.go index b22e839..0d41df8 100644 --- a/cmd/csaf_aggregator/processor.go +++ b/cmd/csaf_aggregator/processor.go @@ -11,6 +11,7 @@ package main import ( "fmt" "log/slog" + "net/url" "os" "path/filepath" @@ -112,6 +113,18 @@ func (w *worker) locateProviderMetadata(domain 
string) error { return nil } +// getProviderBaseURL returns the base URL for the provider. +func (w *worker) getProviderBaseURL() (*url.URL, error) { + baseURL, err := url.Parse(w.processor.cfg.Domain) + if err != nil { + return nil, err + } + baseURL = baseURL.JoinPath(".well-known", + "csaf-aggregator", + w.provider.Name) + return baseURL, nil +} + // removeOrphans removes the directories that are not in the providers list. func (p *processor) removeOrphans() error { From 91b5b4543e6577770cf68ad43cab7fc8f331ff05 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 3 Apr 2025 14:41:14 +0200 Subject: [PATCH 172/235] Check if canonical url prefix is valid --- cmd/csaf_provider/config.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/cmd/csaf_provider/config.go b/cmd/csaf_provider/config.go index 826b7bf..5d29b61 100644 --- a/cmd/csaf_provider/config.go +++ b/cmd/csaf_provider/config.go @@ -11,6 +11,7 @@ package main import ( "fmt" "io" + "net/url" "os" "strings" @@ -262,6 +263,14 @@ func loadConfig() (*config, error) { if cfg.CanonicalURLPrefix == "" { cfg.CanonicalURLPrefix = "https://" + os.Getenv("SERVER_NAME") } + // Check if canonical url prefix is invalid + parsedURL, err := url.ParseRequestURI(cfg.CanonicalURLPrefix) + if err != nil { + return nil, err + } + if parsedURL.Scheme != "https" && parsedURL.Scheme != "http" { + return nil, fmt.Errorf("invalid canonical URL: %q", cfg.CanonicalURLPrefix) + } if cfg.TLPs == nil { cfg.TLPs = []tlp{tlpCSAF, tlpWhite, tlpGreen, tlpAmber, tlpRed} From 3ab00e87594ccad74c40534bbad3f4028abdb5f3 Mon Sep 17 00:00:00 2001 From: Christoph Klassen <100708552+cintek@users.noreply.github.com> Date: Wed, 28 May 2025 11:30:46 +0200 Subject: [PATCH 173/235] Remove golint github action We use Revive already which is a replacement for golint and golint isn't maintained anyway. --- .github/workflows/go.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b86309f..6b07bfd 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -30,9 +30,6 @@ jobs: with: gofmt-flags: "-l -d" - - name: golint - uses: Jerome1337/golint-action@v1.0.3 - - name: Revive Action uses: morphy2k/revive-action@v2.7.4 From fc64bf71650ed878452079c34bab5b78728e409a Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 12 Jun 2025 15:47:24 +0200 Subject: [PATCH 174/235] Upgrade jsonschema to v6 --- csaf/validation.go | 64 +++++++++++++++++++------- go.mod | 32 ++++++------- go.sum | 110 ++++++++++++++++++++++++++++----------------- 3 files changed, 135 insertions(+), 71 deletions(-) diff --git a/csaf/validation.go b/csaf/validation.go index 73e732c..3faf549 100644 --- a/csaf/validation.go +++ b/csaf/validation.go @@ -10,13 +10,17 @@ package csaf import ( "bytes" + "crypto/tls" _ "embed" // Used for embedding. - "io" + "errors" + "fmt" + "net/http" "sort" "strings" "sync" + "time" - "github.com/santhosh-tekuri/jsonschema/v5" + "github.com/santhosh-tekuri/jsonschema/v6" ) //go:embed schema/csaf_json_schema.json @@ -64,13 +68,29 @@ var ( compiledRolieSchema = compiledSchema{url: rolieSchemaURL} ) -// loadURL loads the content of an URL from embedded data or -// falls back to the global loader function of the jsonschema package. 
-func loadURL(s string) (io.ReadCloser, error) { - loader := func(data []byte) (io.ReadCloser, error) { - return io.NopCloser(bytes.NewReader(data)), nil +type schemaLoader http.Client + +func (l *schemaLoader) loadHTTPURL(url string) (any, error) { + client := (*http.Client)(l) + resp, err := client.Get(url) + if err != nil { + return nil, err } - switch s { + if resp.StatusCode != http.StatusOK { + _ = resp.Body.Close() + return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode) + } + defer resp.Body.Close() + + return jsonschema.UnmarshalJSON(resp.Body) +} + +// Load loads the schema from the specified url. +func (l *schemaLoader) Load(url string) (any, error) { + loader := func(data []byte) (any, error) { + return jsonschema.UnmarshalJSON(bytes.NewReader(data)) + } + switch url { case csafSchemaURL: return loader(csafSchema) case cvss20SchemaURL: @@ -86,14 +106,27 @@ func loadURL(s string) (io.ReadCloser, error) { case rolieSchemaURL: return loader(rolieSchema) default: - return jsonschema.LoadURL(s) + // Fallback to http loader + return l.loadHTTPURL(url) } } +func newSchemaLoader(insecure bool) *schemaLoader { + httpLoader := schemaLoader(http.Client{ + Timeout: 15 * time.Second, + }) + if insecure { + httpLoader.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + } + return &httpLoader +} + func (cs *compiledSchema) compile() { c := jsonschema.NewCompiler() - c.AssertFormat = true - c.LoadURL = loadURL + c.AssertFormat() + c.UseLoader(newSchemaLoader(false)) cs.compiled, cs.err = c.Compile(cs.url) } @@ -109,7 +142,8 @@ func (cs *compiledSchema) validate(doc any) ([]string, error) { return nil, nil } - valErr, ok := err.(*jsonschema.ValidationError) + var valErr *jsonschema.ValidationError + ok := errors.As(err, &valErr) if !ok { return nil, err } @@ -133,21 +167,21 @@ func (cs *compiledSchema) validate(doc any) ([]string, error) { if pi != pj { return pi < pj } - return errs[i].Error < errs[j].Error + return errs[i].Error.String() < errs[j].Error.String() }) res := make([]string, 0, len(errs)) for i := range errs { e := &errs[i] - if e.Error == "" { + if e.Error == nil { continue } loc := e.InstanceLocation if loc == "" { loc = e.AbsoluteKeywordLocation } - res = append(res, loc+": "+e.Error) + res = append(res, loc+": "+e.Error.String()) } return res, nil diff --git a/go.mod b/go.mod index 1ef2216..5a27126 100644 --- a/go.mod +++ b/go.mod @@ -1,31 +1,33 @@ module github.com/gocsaf/csaf/v3 -go 1.22.9 +go 1.23.0 + +toolchain go1.24.4 require ( - github.com/BurntSushi/toml v1.4.0 + github.com/BurntSushi/toml v1.5.0 github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 - github.com/ProtonMail/gopenpgp/v2 v2.8.0 - github.com/PuerkitoBio/goquery v1.8.1 + github.com/ProtonMail/gopenpgp/v2 v2.9.0 + github.com/PuerkitoBio/goquery v1.10.3 github.com/gofrs/flock v0.12.1 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 - github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 - go.etcd.io/bbolt v1.3.11 - golang.org/x/crypto v0.29.0 - golang.org/x/term v0.26.0 - golang.org/x/time v0.8.0 + github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 + go.etcd.io/bbolt v1.4.1 + golang.org/x/crypto v0.39.0 + golang.org/x/term v0.32.0 + golang.org/x/time v0.12.0 ) require ( - github.com/ProtonMail/go-crypto v1.1.2 // indirect + github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect - github.com/andybalholm/cascadia v1.3.2 // indirect - 
github.com/cloudflare/circl v1.5.0 // indirect + github.com/andybalholm/cascadia v1.3.3 // indirect + github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.31.0 // indirect - golang.org/x/sys v0.27.0 // indirect - golang.org/x/text v0.20.0 // indirect + golang.org/x/net v0.41.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.26.0 // indirect ) diff --git a/go.sum b/go.sum index 47637e9..1f5b5b4 100644 --- a/go.sum +++ b/go.sum @@ -1,28 +1,30 @@ -github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= -github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/Intevation/gval v1.3.0 h1:+Ze5sft5MmGbZrHj06NVUbcxCb67l9RaPTLMNr37mjw= github.com/Intevation/gval v1.3.0/go.mod h1:xmGyGpP5be12EL0P12h+dqiYG8qn2j3PJxIgkoOHO5o= github.com/Intevation/jsonpath v0.2.1 h1:rINNQJ0Pts5XTFEG+zamtdL7l9uuE1z0FBA+r55Sw+A= github.com/Intevation/jsonpath v0.2.1/go.mod h1:WnZ8weMmwAx/fAO3SutjYFU+v7DFreNYnibV7CiaYIw= -github.com/ProtonMail/go-crypto v1.1.2 h1:A7JbD57ThNqh7XjmHE+PXpQ3Dqt3BrSAC0AL0Go3KS0= -github.com/ProtonMail/go-crypto v1.1.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= +github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= -github.com/ProtonMail/gopenpgp/v2 v2.8.0 h1:WvMv3CMcFsqKSM4/Qf8sf3tgyQkzDqQmoSE49bnBuP4= -github.com/ProtonMail/gopenpgp/v2 v2.8.0/go.mod h1:qb2GUSnmA9ipBW5GVtCtEhkummSlqs2A8Ar3S0HBgSY= -github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= -github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= -github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= -github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= -github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= -github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= -github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/ProtonMail/gopenpgp/v2 v2.9.0 h1:ruLzBmwe4dR1hdnrsEJ/S7psSBmV15gFttFUPP/+/kE= +github.com/ProtonMail/gopenpgp/v2 v2.9.0/go.mod h1:IldDyh9Hv1ZCCYatTuuEt1XZJ0OPjxLpTarDfglih7s= +github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo= +github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y= +github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= +github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -31,67 +33,93 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= -github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= -go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= +go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= +go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= -golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.39.0 
h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= -golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= -golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= -golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= -golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= +golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= -golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= -golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= -golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod 
h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= From 6955c4e37c0462d8cc810e31a3b15e5d6a57b77d Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Jun 2025 10:19:21 +0200 Subject: [PATCH 175/235] Upgrade node.js and format workflow file --- .github/workflows/itest.yml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 8bc87d5..a99c269 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -5,16 +5,15 @@ jobs: build: runs-on: ubuntu-latest steps: - - name: Set up Go uses: actions/setup-go@v5 with: - go-version: '^1.23.6' + go-version: "^1.23.6" - name: Set up Node.js uses: actions/setup-node@v3 with: - node-version: 16 + node-version: 24 - name: Checkout uses: actions/checkout@v4 @@ -38,8 +37,8 @@ jobs: - name: Upload test results uses: actions/upload-artifact@v4 with: - name: checker-results - path: | - ~/checker-results.html - ~/checker-results-no-clientcert.json - if-no-files-found: error + name: checker-results + path: | + ~/checker-results.html + ~/checker-results-no-clientcert.json + if-no-files-found: error From 34705f3c6e3dcc73b6708e01b91b7e47980bcc52 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 13 Jun 2025 11:00:22 +0200 Subject: [PATCH 176/235] Address comments --- csaf/validation.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/csaf/validation.go b/csaf/validation.go index 3faf549..598d0fa 100644 --- a/csaf/validation.go +++ b/csaf/validation.go @@ -76,11 +76,10 @@ func (l *schemaLoader) loadHTTPURL(url string) (any, error) { if err != nil { return nil, err } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { - _ = resp.Body.Close() return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode) } - defer resp.Body.Close() return jsonschema.UnmarshalJSON(resp.Body) } From dcdbc5d49d951ac677a1e39039c3506aaf65304c Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 6 Jun 2025 22:49:11 +0200 Subject: [PATCH 177/235] Add semver breaking changes detection --- .github/workflows/go.yml | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b86309f..bed2620 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -12,7 +12,8 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Checkout + uses: actions/checkout@v4 - name: Set up Go uses: actions/setup-go@v4 @@ -38,3 +39,27 @@ jobs: - name: Tests run: go test -v ./... 
+ + run_modver: + runs-on: ubuntu-latest + needs: build # Only run when build job was successful + if: ${{ github.event_name == 'pull_request' && success() }} + permissions: + contents: read # Modver needs to read the repo content + pull-requests: write # Modver needs to write comments/status on PRs + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Modver needs full history for comparison + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: "stable" + + - name: Modver + uses: bobg/modver@v2.5.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} From cb291bb81b5cd562e906e69b403421e99a978534 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 19 Jun 2025 12:03:52 +0200 Subject: [PATCH 178/235] Update modver --- .github/workflows/go.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 7f21af0..b3f5389 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -56,7 +56,7 @@ jobs: go-version: "stable" - name: Modver - uses: bobg/modver@v2.5.0 + uses: bobg/modver@v2.11.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} From 6ac97810d0337b385633a2a1c8a8f80c6a71b478 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 19 Jun 2025 15:11:45 +0200 Subject: [PATCH 179/235] Use JoinPath This avoids issues where parts of the URL are discarded. --- cmd/csaf_checker/links.go | 3 ++- cmd/csaf_checker/processor.go | 11 ++++++----- cmd/csaf_checker/roliecheck.go | 7 ++++--- cmd/csaf_downloader/downloader.go | 2 +- csaf/advisories.go | 5 +++-- internal/misc/url.go | 21 +++++++++++++++++++++ 6 files changed, 37 insertions(+), 12 deletions(-) create mode 100644 internal/misc/url.go diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index a323661..c7aec57 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -9,6 +9,7 @@ package main import ( + "github.com/gocsaf/csaf/v3/internal/misc" "io" "net/http" "net/url" @@ -93,7 +94,7 @@ func (pgs pages) listed( return err } // Links may be relative - abs := baseURL.ResolveReference(u).String() + abs := misc.JoinURL(baseURL, u).String() content.links.Add(abs) return nil }) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index ae79133..c0c4437 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -18,6 +18,7 @@ import ( "encoding/json" "errors" "fmt" + "github.com/gocsaf/csaf/v3/internal/misc" "io" "log" "net/http" @@ -644,7 +645,7 @@ func (p *processor) integrity( } fp = makeAbs(fp) - u := b.ResolveReference(fp).String() + u := misc.JoinURL(b, fp).String() // Should this URL be ignored? 
if p.cfg.ignoreURL(u) { @@ -777,7 +778,7 @@ func (p *processor) integrity( continue } hu = makeAbs(hu) - hashFile := b.ResolveReference(hu).String() + hashFile := misc.JoinURL(b, hu).String() p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { @@ -827,7 +828,7 @@ func (p *processor) integrity( continue } su = makeAbs(su) - sigFile := b.ResolveReference(su).String() + sigFile := misc.JoinURL(b, su).String() p.checkTLS(sigFile) p.badSignatures.use() @@ -1374,7 +1375,7 @@ func (p *processor) checkSecurityFolder(folder string) string { return err.Error() } - u = base.ResolveReference(up).String() + u = misc.JoinURL(base, up).String() p.checkTLS(u) if res, err = client.Get(u); err != nil { return fmt.Sprintf("Cannot fetch %s from security.txt: %v", u, err) @@ -1539,7 +1540,7 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - u := base.ResolveReference(up).String() + u := misc.JoinURL(base, up).String() p.checkTLS(u) res, err := client.Get(u) diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 28bd437..0a9ff04 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -10,6 +10,7 @@ package main import ( "errors" + "github.com/gocsaf/csaf/v3/internal/misc" "net/http" "net/url" "sort" @@ -237,7 +238,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedBase := base.ResolveReference(up) + feedBase := misc.JoinURL(base, up) feedURL := feedBase.String() p.checkTLS(feedURL) @@ -270,7 +271,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - feedURL := base.ResolveReference(up) + feedURL := misc.JoinURL(base, up) feedBase, err := util.BaseURL(feedURL) if err != nil { p.badProviderMetadata.error("Bad base path: %v", err) @@ -325,7 +326,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - feedBase := base.ResolveReference(up) + feedBase := misc.JoinURL(base, up) makeAbs := makeAbsolute(feedBase) label := defaults(feed.TLPLabel, csaf.TLPLabelUnlabeled) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index bcef357..90e3ac3 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -343,7 +343,7 @@ func (d *downloader) loadOpenPGPKeys( continue } - u := base.ResolveReference(up).String() + u := base.JoinPath(up.Path).String() res, err := client.Get(u) if err != nil { diff --git a/csaf/advisories.go b/csaf/advisories.go index ef3fea8..e7bc11a 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -12,6 +12,7 @@ import ( "context" "encoding/csv" "fmt" + "github.com/gocsaf/csaf/v3/internal/misc" "io" "log/slog" "net/http" @@ -281,7 +282,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } - feedURL := afp.base.ResolveReference(up) + feedURL := misc.JoinURL(afp.base, up) slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) @@ -325,7 +326,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid URL", "url", u, "err", err) return "" } - return feedBaseURL.ResolveReference(p).String() + return misc.JoinURL(feedBaseURL, p).String() } rfeed.Entries(func(entry *Entry) { diff --git a/internal/misc/url.go b/internal/misc/url.go new file mode 100644 index 0000000..2256a94 --- /dev/null +++ b/internal/misc/url.go @@ -0,0 +1,21 @@ +// This file is Free Software under the 
Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2025 German Federal Office for Information Security (BSI) +// Software-Engineering: 2025 Intevation GmbH + +package misc + +import "net/url" + +// JoinURL joins the two URLs while preserving the query and fragment part of the latter. +func JoinURL(baseURL *url.URL, relativeURL *url.URL) *url.URL { + u := baseURL.JoinPath(relativeURL.Path) + u.RawQuery = relativeURL.RawQuery + u.RawFragment = relativeURL.RawFragment + // Enforce https, this is required if the base url was only a domain + u.Scheme = "https" + return u +} From 091854a2480e92e705a67f03c7ad621270216439 Mon Sep 17 00:00:00 2001 From: koplas Date: Thu, 19 Jun 2025 11:39:54 +0200 Subject: [PATCH 180/235] Always generate report Closes #385 --- cmd/csaf_checker/processor.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index ae79133..a574a5d 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -253,12 +253,10 @@ func (p *processor) run(domains []string) (*Report, error) { if !p.checkProviderMetadata(d) { // We cannot build a report if the provider metadata cannot be parsed. log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) - continue } if err := p.checkDomain(d); err != nil { log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. "+ "Continuing with next domain.", d, err) - continue } domain := &Domain{Name: d} @@ -1431,7 +1429,6 @@ func (p *processor) checkDNS(domain string) { // checkWellknown checks if the provider-metadata.json file is // available under the /.well-known/csaf/ directory. 
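A rough, standalone sketch of the JoinURL helper introduced above: it appends the relative path with URL.JoinPath, carries over the query and fragment of the relative URL, and forces https — presumably the "discarded parts" the commit message refers to is URL.ResolveReference dropping the final path segment of the base when resolving a relative reference. The URLs below are invented for illustration.

package main

import (
	"fmt"
	"net/url"
)

// joinURL mirrors misc.JoinURL from the patch above: append the relative
// path, keep query and fragment of the relative URL, enforce https.
func joinURL(base, rel *url.URL) *url.URL {
	u := base.JoinPath(rel.Path)
	u.RawQuery = rel.RawQuery
	u.RawFragment = rel.RawFragment
	u.Scheme = "https"
	return u
}

func main() {
	base, _ := url.Parse("https://example.com/.well-known/csaf/")
	rel, _ := url.Parse("white/2024/example-advisory.json?token=abc")
	fmt.Println(joinURL(base, rel))
	// https://example.com/.well-known/csaf/white/2024/example-advisory.json?token=abc
}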
func (p *processor) checkWellknown(domain string) { - p.badWellknownMetadata.use() client := p.httpClient() path := "https://" + domain + "/.well-known/csaf/provider-metadata.json" From 1098c6add07755d8f628edd90d3d5bc67796f812 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 20 Jun 2025 14:46:26 +0200 Subject: [PATCH 181/235] Use correct base URL --- cmd/csaf_checker/processor.go | 5 +---- cmd/csaf_checker/roliecheck.go | 3 ++- cmd/csaf_downloader/downloader.go | 1 + csaf/advisories.go | 1 + 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index c0c4437..bfaf9e1 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -632,7 +632,6 @@ func (p *processor) integrity( if err != nil { return err } - makeAbs := makeAbsolute(b) client := p.httpClient() var data bytes.Buffer @@ -643,7 +642,6 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", f, err) continue } - fp = makeAbs(fp) u := misc.JoinURL(b, fp).String() @@ -777,7 +775,6 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", x.url(), err) continue } - hu = makeAbs(hu) hashFile := misc.JoinURL(b, hu).String() p.checkTLS(hashFile) @@ -827,7 +824,6 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", f.SignURL(), err) continue } - su = makeAbs(su) sigFile := misc.JoinURL(b, su).String() p.checkTLS(sigFile) @@ -1527,6 +1523,7 @@ func (p *processor) checkPGPKeys(_ string) error { if err != nil { return err } + base.Path = "" for i := range keys { key := &keys[i] diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index 0a9ff04..ace4d0d 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -222,6 +222,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { if err != nil { return err } + base.Path = "" p.badROLIEFeed.use() advisories := map[*csaf.Feed][]csaf.AdvisoryFile{} @@ -291,7 +292,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { // TODO: Issue a warning if we want check AMBER+ without an // authorizing client. 
- if err := p.integrity(files, feedBase, rolieMask, p.badProviderMetadata.add); err != nil { + if err := p.integrity(files, base.String(), rolieMask, p.badProviderMetadata.add); err != nil { if err != errContinue { return err } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 90e3ac3..2b08544 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -229,6 +229,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } + base.Path = "" expr := util.NewPathEval() diff --git a/csaf/advisories.go b/csaf/advisories.go index e7bc11a..c5e4fea 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -295,6 +295,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Cannot parse feed base URL", "url", fb, "err", err) continue } + feedBaseURL.Path = "" res, err := afp.client.Get(feedURL.String()) if err != nil { From 36aab33de4ecfb1107e3174849ff9c750c84b8a0 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 20 Jun 2025 16:50:13 +0200 Subject: [PATCH 182/235] Use folder name as version if git describe failed --- Makefile | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 163ace5..0ae02b0 100644 --- a/Makefile +++ b/Makefile @@ -47,13 +47,18 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always) +GITDESC := $(shell git describe --tags --always 2>/dev/null || true) +CURRENT_FOLDER_NAME := $(notdir $(CURDIR)) +ifeq ($(strip $(GITDESC)),) +SEMVER := $(CURRENT_FOLDER_NAME) +else GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/') SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 ))) # Hint: The second regexp in the next line only matches # if there is a hyphen (`-`) followed by a number, # by which we assume that git describe has added a string after the tag SEMVER := $(shell echo '$(GITDESC)' | sed -E -e 's/^v//' -e 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' ) +endif testsemver: @echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\' From 9c62e89a23d71d9f9e3cdd24940c3a0c300ac33c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 24 Jun 2025 14:34:44 +0200 Subject: [PATCH 183/235] Feat: More explicitely handle which doc files are included in the gnulinux dist --- Makefile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 0ae02b0..f399bf5 100644 --- a/Makefile +++ b/Makefile @@ -103,7 +103,13 @@ dist: build_linux build_win build_mac_amd64 build_mac_arm64 cp docs/$${f}.md dist/$(DISTDIR)-macos/docs ; \ done mkdir dist/$(DISTDIR)-gnulinux-amd64 - cp -r README.md docs bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 + cp -r README.md bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 + # adjust which docs to copy + mkdir -p dist/tmp_docs + cp -r docs/examples dist/tmp_docs + cp docs/*.md dist/tmp_docs + cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-amd64/docs + rm -rf dist/tmp_docs cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-amd64.tar.gz $(DISTDIR)-gnulinux-amd64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-macos.tar.gz $(DISTDIR)-macos From 02d49311526b5ae27226e59487fa0350f25f4359 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 
24 Jun 2025 17:06:55 +0200 Subject: [PATCH 184/235] Fix: Return properly early --- cmd/csaf_checker/processor.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index a574a5d..1110af1 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1437,6 +1437,7 @@ func (p *processor) checkWellknown(domain string) { if err != nil { p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed: %v", path, err)) + return } if res.StatusCode != http.StatusOK { p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", From 3f4fe5cf185b73271be7d706e92b065ccfd54703 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 24 Jun 2025 17:18:16 +0200 Subject: [PATCH 185/235] Also generate report when role is not available --- cmd/csaf_checker/processor.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 1110af1..7db2364 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -255,8 +255,7 @@ func (p *processor) run(domains []string) (*Report, error) { log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) } if err := p.checkDomain(d); err != nil { - log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. "+ - "Continuing with next domain.", d, err) + log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) } domain := &Domain{Name: d} @@ -267,8 +266,10 @@ func (p *processor) run(domains []string) (*Report, error) { } if domain.Role == nil { - log.Printf("No role found in meta data. Ignoring domain %q\n", d) - continue + log.Printf("No role found in meta data for domain %q\n", d) + // Assume provider to continue report generation + role := csaf.MetadataRolePublisher + domain.Role = &role } rules := roleRequirements(*domain.Role) From d09db6635da6e753940341513a2d1ae610bf0f49 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Tue, 24 Jun 2025 17:24:08 +0200 Subject: [PATCH 186/235] Fix: Assume most restrictive role to prevent false-positives --- cmd/csaf_checker/processor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 7db2364..f977092 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -267,8 +267,8 @@ func (p *processor) run(domains []string) (*Report, error) { if domain.Role == nil { log.Printf("No role found in meta data for domain %q\n", d) - // Assume provider to continue report generation - role := csaf.MetadataRolePublisher + // Assume trusted provider to continue report generation + role := csaf.MetadataRoleTrustedProvider domain.Role = &role } From 5d37dd1339d394fe1cc1111f369a670b9e6a61ec Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 25 Jun 2025 09:27:12 +0200 Subject: [PATCH 187/235] Move PMD error from logs to report. --- cmd/csaf_checker/processor.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index f977092..ef273d0 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -251,11 +251,16 @@ func (p *processor) run(domains []string) (*Report, error) { p.reset() if !p.checkProviderMetadata(d) { - // We cannot build a report if the provider metadata cannot be parsed. 
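The early return added in patch 184 matters because a failed client.Get can leave the *http.Response unusable (typically nil); without the return, the following res.StatusCode check can dereference a nil response. A minimal sketch of the resulting pattern — the same ordering patch 176 applies in loadHTTPURL — with illustrative names only:

package main

import (
	"fmt"
	"io"
	"net/http"
)

// fetchOK returns early on transport errors (res may be nil there), then
// defers closing the body before inspecting the status code.
func fetchOK(client *http.Client, url string) ([]byte, error) {
	res, err := client.Get(url)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("fetching %s failed: status code %d", url, res.StatusCode)
	}
	return io.ReadAll(res.Body)
}

func main() {
	body, err := fetchOK(http.DefaultClient, "https://example.com/.well-known/csaf/provider-metadata.json")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("fetched %d bytes\n", len(body))
}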
- log.Printf("Could not parse the Provider-Metadata.json of: %s\n", d) + // We need to fail the domain if the PMD cannot be parsed. + p.badProviderMetadata.use() + message := fmt.Sprintf("Could not parse the Provider-Metadata.json of: %s", d) + p.badProviderMetadata.error(message) + } if err := p.checkDomain(d); err != nil { - log.Printf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) + p.badProviderMetadata.use() + message := fmt.Sprintf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) + p.badProviderMetadata.error(message) } domain := &Domain{Name: d} From d54e211ef3098e4dd74dc0ff85e8f3324760e4c9 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 27 Jun 2025 09:49:32 +0200 Subject: [PATCH 188/235] docs: improve README.md * Deemphazise the old repo link alert. * Add more hints about officially unsupported but possible use as library. solve #634 --- README.md | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index b76bf95..ccb8d67 100644 --- a/README.md +++ b/README.md @@ -9,14 +9,6 @@ --> -> [!IMPORTANT] -> To avoid future breakage, if you still use `csaf-poc`: -> 1. Adjust your HTML links. -> 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). -> -> (This repository was moved here on 2024-10-28. The old one is deprecated -> and redirection will be switched off a few months later.) - # csaf @@ -49,13 +41,22 @@ is a tool for testing a CSAF Trusted Provider according to [Section 7 of the CSA ### [csaf_aggregator](docs/csaf_aggregator.md) is a CSAF Aggregator, to list or mirror providers. -## Other stuff + +## Use as go library + +The modules of this repository can be used as library from other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. +But there is only limited support, and thus _not officially supported_. +There are plans to change this without timeline, with a future major release, +e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). + +Initially envisioned as toolbox, it was not constructed as a library, +and to name one issue, exposes to many functions. +This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change, +that we now have to live with. ### [examples](./examples/README.md) -are small examples of how to use `github.com/gocsaf/csaf` -as an API. Currently this is a work in progress, as usage of this repository -as a library to access is _not officially supported_, e.g. -see https://github.com/gocsaf/csaf/issues/367 . +are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress. + ## Setup Binaries for the server side are only available and tested @@ -107,6 +108,14 @@ Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-wi For further details of the development process consult our [development page](./docs/Development.md). +## Previous repo URLs + +> [!NOTE] +> To avoid future breakage, if you have `csaf-poc` in some of your URLs: +> 1. Adjust your HTML links. +> 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). +> +> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off somtimes in 2025.) 
## License From a6d0a0c790644362cb128f473e42c10b8e993bf5 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 27 Jun 2025 10:20:56 +0200 Subject: [PATCH 189/235] docs: extend package csaf doc comment * fix sentence. * add link to the section in the top-level readme that has the limits on the use as a library. --- csaf/doc.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/csaf/doc.go b/csaf/doc.go index f1e092c..233bda6 100644 --- a/csaf/doc.go +++ b/csaf/doc.go @@ -6,7 +6,11 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -// Package csaf contains the core data models used by the csaf distribution. +// Package csaf contains the core data models used by the csaf distribution +// tools. +// +// See https://github.com/gocsaf/csaf/tab=readme-ov-file#use-as-go-library +// about hints and limits for its use as a library. package csaf //go:generate go run ./generate_cvss_enums.go -o cvss20enums.go -i ./schema/cvss-v2.0.json -p CVSS20 From 7b7d0c4dcb035d1edd8684d115abd246684e9e60 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 27 Jun 2025 10:24:48 +0200 Subject: [PATCH 190/235] improve phrasing --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ccb8d67..094412f 100644 --- a/README.md +++ b/README.md @@ -45,9 +45,8 @@ is a CSAF Aggregator, to list or mirror providers. ## Use as go library The modules of this repository can be used as library from other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. -But there is only limited support, and thus _not officially supported_. -There are plans to change this without timeline, with a future major release, -e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). +But there is only limited support, and thus it is _not officially supported_. +There are plans to change this without concrete schedule, with a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). Initially envisioned as toolbox, it was not constructed as a library, and to name one issue, exposes to many functions. From a7b1291be858edd0d555bd7026cd6e2ba050eba5 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 27 Jun 2025 17:20:19 +0200 Subject: [PATCH 191/235] Print warning if no config file was found --- internal/options/options.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/options/options.go b/internal/options/options.go index 3a4867f..38b5bd4 100644 --- a/internal/options/options.go +++ b/internal/options/options.go @@ -46,7 +46,6 @@ type Parser[C any] struct { // If a config file was specified it is loaded. // Returns the arguments and the configuration. func (p *Parser[C]) Parse() ([]string, *C, error) { - var cmdLineOpts C if p.SetDefaults != nil { p.SetDefaults(&cmdLineOpts) @@ -82,6 +81,7 @@ func (p *Parser[C]) Parse() ([]string, *C, error) { // No config file -> We are good. if path == "" { + slog.Warn("No config file found. 
Maybe you want to specify one or store it in a respective default location", "locations", p.DefaultConfigLocations) return args, &cmdLineOpts, nil } From 27e9519ed56efeecf47fb94257a0f32427ad5aae Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 2 Jul 2025 09:20:27 +0200 Subject: [PATCH 192/235] Fix: Remove some Typos as well as grammatical errors and oddities --- README.md | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 094412f..ad2dc86 100644 --- a/README.md +++ b/README.md @@ -44,14 +44,13 @@ is a CSAF Aggregator, to list or mirror providers. ## Use as go library -The modules of this repository can be used as library from other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. -But there is only limited support, and thus it is _not officially supported_. -There are plans to change this without concrete schedule, with a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). +The modules of this repository can be used as library by other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example. +But there is only limited support and thus it is _not officially supported_. +There are plans to change this without a concrete schedule within a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367). -Initially envisioned as toolbox, it was not constructed as a library, -and to name one issue, exposes to many functions. -This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change, -that we now have to live with. +Initially envisioned as a toolbox, it was not constructed as a library, +and to name one issue, exposes too many functions. +This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change. ### [examples](./examples/README.md) are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress. From 21ce19735bbeab67353ef97939b53a2fa5322903 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Wed, 2 Jul 2025 09:23:23 +0200 Subject: [PATCH 193/235] Fix: Fix typo and misleading meaning --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ad2dc86..897dfe0 100644 --- a/README.md +++ b/README.md @@ -113,7 +113,7 @@ For further details of the development process consult our [development page](./ > 1. Adjust your HTML links. > 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379). > -> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off somtimes in 2025.) +> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off sometime in 2025.) 
## License From 3262e2ec2a746a78e1ee829455d37a09df009790 Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 2 Jul 2025 15:33:37 +0200 Subject: [PATCH 194/235] Fix aggregator url base handling --- cmd/csaf_aggregator/mirror.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 1ef5881..f7b3100 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -71,6 +71,7 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { if err != nil { return nil, err } + base.Path = "" afp := csaf.NewAdvisoryFileProcessor( w.client, From 01c43d96ce47d34cfd981dd297de97b06113055e Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 2 Jul 2025 16:27:58 +0200 Subject: [PATCH 195/235] Fix checker url base handling --- cmd/csaf_checker/processor.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index def1960..2e0a424 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -1374,6 +1374,7 @@ func (p *processor) checkSecurityFolder(folder string) string { if err != nil { return err.Error() } + base.Path = "" u = misc.JoinURL(base, up).String() p.checkTLS(u) From fc3837d655f3b4d08fcf4c61196fd4cfcfa501da Mon Sep 17 00:00:00 2001 From: koplas Date: Wed, 2 Jul 2025 17:06:25 +0200 Subject: [PATCH 196/235] Make json parsing more strict --- cmd/csaf_aggregator/interim.go | 4 ++-- cmd/csaf_aggregator/mirror.go | 5 ++--- cmd/csaf_checker/processor.go | 11 +++++----- cmd/csaf_downloader/downloader.go | 3 ++- cmd/csaf_uploader/processor.go | 7 +++---- cmd/csaf_validator/main.go | 4 ++-- csaf/advisory.go | 5 +++-- csaf/generate_cvss_enums.go | 5 +++-- csaf/models.go | 5 ++--- csaf/providermetaloader.go | 7 +++---- csaf/remotevalidation.go | 6 +++--- csaf/rolie.go | 8 ++++---- internal/misc/json.go | 34 +++++++++++++++++++++++++++++++ 13 files changed, 68 insertions(+), 36 deletions(-) create mode 100644 internal/misc/json.go diff --git a/cmd/csaf_aggregator/interim.go b/cmd/csaf_aggregator/interim.go index 94147bc..8805fdb 100644 --- a/cmd/csaf_aggregator/interim.go +++ b/cmd/csaf_aggregator/interim.go @@ -13,7 +13,6 @@ import ( "crypto/sha256" "crypto/sha512" "encoding/csv" - "encoding/json" "errors" "fmt" "io" @@ -25,6 +24,7 @@ import ( "time" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -81,7 +81,7 @@ func (w *worker) checkInterims( if err := func() error { defer res.Body.Close() tee := io.TeeReader(res.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) + return misc.StrictJSONParse(tee, &doc) }(); err != nil { return nil, err } diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index 1ef5881..f9ddcad 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -13,7 +13,6 @@ import ( "crypto/sha256" "crypto/sha512" "encoding/hex" - "encoding/json" "fmt" "io" "log/slog" @@ -31,6 +30,7 @@ import ( "github.com/ProtonMail/gopenpgp/v2/crypto" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -538,7 +538,7 @@ func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) download := func(r io.Reader) error { tee := io.TeeReader(r, hasher) - return json.NewDecoder(tee).Decode(&advisory) + return misc.StrictJSONParse(tee, &advisory) } if err := downloadJSON(w.client, file.URL(), download); err != nil { @@ -627,7 +627,6 @@ func (w *worker) 
mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) // If this fails it creates a signature itself with the configured key. func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error { sig, err := w.downloadSignature(url) - if err != nil { if err != errNotFound { w.log.Error("Could not find signature URL", "url", url, "err", err) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index def1960..08ec55e 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -15,10 +15,8 @@ import ( "crypto/sha512" "crypto/tls" "encoding/csv" - "encoding/json" "errors" "fmt" - "github.com/gocsaf/csaf/v3/internal/misc" "io" "log" "net/http" @@ -30,6 +28,8 @@ import ( "strings" "time" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/ProtonMail/gopenpgp/v2/crypto" "golang.org/x/time/rate" @@ -518,7 +518,7 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { return nil, nil, fmt.Errorf("%s: %v", feed, err) } var rolieDoc any - err = json.NewDecoder(bytes.NewReader(all)).Decode(&rolieDoc) + err = misc.StrictJSONParse(bytes.NewReader(all), &rolieDoc) return rfeed, rolieDoc, err }() if err != nil { @@ -702,7 +702,7 @@ func (p *processor) integrity( if err := func() error { defer res.Body.Close() tee := io.TeeReader(res.Body, hasher) - return json.NewDecoder(tee).Decode(&doc) + return misc.StrictJSONParse(tee, &doc) }(); err != nil { lg(ErrorType, "Reading %s failed: %v", u, err) continue @@ -1035,8 +1035,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] - times, files = - append(times, t), + times, files = append(times, t), append(files, csaf.DirectoryAdvisoryFile{Path: path}) p.timesChanges[path] = t } diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 2b08544..4890593 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -35,6 +35,7 @@ import ( "golang.org/x/time/rate" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -551,7 +552,7 @@ func (dc *downloadContext) downloadAdvisory( tee := io.TeeReader(resp.Body, hasher) - if err := json.NewDecoder(tee).Decode(&doc); err != nil { + if err := misc.StrictJSONParse(tee, &doc); err != nil { dc.stats.downloadFailed++ slog.Warn("Downloading failed", "url", file.URL(), diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index f655e02..104e1ef 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -11,7 +11,6 @@ package main import ( "bytes" "crypto/tls" - "encoding/json" "errors" "fmt" "io" @@ -91,7 +90,7 @@ func (p *processor) create() error { Errors []string `json:"errors"` } - if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + if err := misc.StrictJSONParse(resp.Body, &result); err != nil { return err } @@ -115,7 +114,7 @@ func (p *processor) uploadRequest(filename string) (*http.Request, error) { if !p.cfg.NoSchemaCheck { var doc any - if err := json.NewDecoder(bytes.NewReader(data)).Decode(&doc); err != nil { + if err := misc.StrictJSONParse(bytes.NewReader(data), &doc); err != nil { return nil, err } errs, err := csaf.ValidateCSAF(doc) @@ -239,7 +238,7 @@ func (p *processor) process(filename string) error { Errors []string `json:"errors"` } - if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + if err := misc.StrictJSONParse(resp.Body, &result); err != nil { return err } diff 
--git a/cmd/csaf_validator/main.go b/cmd/csaf_validator/main.go index b3a0855..8cf6d9a 100644 --- a/cmd/csaf_validator/main.go +++ b/cmd/csaf_validator/main.go @@ -10,7 +10,6 @@ package main import ( - "encoding/json" "fmt" "log" "os" @@ -19,6 +18,7 @@ import ( "github.com/jessevdk/go-flags" "github.com/gocsaf/csaf/v3/csaf" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -301,7 +301,7 @@ func loadJSONFromFile(fname string) (any, error) { } defer f.Close() var doc any - if err = json.NewDecoder(f).Decode(&doc); err != nil { + if err = misc.StrictJSONParse(f, &doc); err != nil { return nil, err } return doc, err diff --git a/csaf/advisory.go b/csaf/advisory.go index e81a28a..cc2516a 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -14,6 +14,8 @@ import ( "fmt" "io" "os" + + "github.com/gocsaf/csaf/v3/internal/misc" ) // Acknowledgement reflects the 'acknowledgement' object in the list of acknowledgements. @@ -383,7 +385,6 @@ type Relationship struct { FullProductName *FullProductName `json:"full_product_name"` // required ProductReference *ProductID `json:"product_reference"` // required RelatesToProductReference *ProductID `json:"relates_to_product_reference"` // required - } // Relationships is a list of Relationship. @@ -1391,7 +1392,7 @@ func LoadAdvisory(fname string) (*Advisory, error) { } defer f.Close() var advisory Advisory - if err := json.NewDecoder(f).Decode(&advisory); err != nil { + if err := misc.StrictJSONParse(f, &advisory); err != nil { return nil, err } if err := advisory.Validate(); err != nil { diff --git a/csaf/generate_cvss_enums.go b/csaf/generate_cvss_enums.go index c84ab15..2fa214b 100644 --- a/csaf/generate_cvss_enums.go +++ b/csaf/generate_cvss_enums.go @@ -12,7 +12,6 @@ package main import ( "bytes" - "encoding/json" "flag" "fmt" "go/format" @@ -22,6 +21,8 @@ import ( "sort" "strings" "text/template" + + "github.com/gocsaf/csaf/v3/internal/misc" ) // We from Intevation consider the source code parts in the following @@ -98,7 +99,7 @@ func loadSchema(filename string) (*schema, error) { } defer f.Close() var s schema - if err := json.NewDecoder(f).Decode(&s); err != nil { + if err := misc.StrictJSONParse(f, &s); err != nil { return nil, err } return &s, nil diff --git a/csaf/models.go b/csaf/models.go index c4b132d..983bf9c 100644 --- a/csaf/models.go +++ b/csaf/models.go @@ -17,6 +17,7 @@ import ( "strings" "time" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -575,7 +576,6 @@ func (d *Distribution) Validate() error { // Validate checks if the provider metadata is valid. // Returns an error if the validation fails otherwise nil. 
func (pmd *ProviderMetadata) Validate() error { - switch { case pmd.CanonicalURL == nil: return errors.New("canonical_url is mandatory") @@ -695,8 +695,7 @@ func (pmd *ProviderMetadata) WriteTo(w io.Writer) (int64, error) { func LoadProviderMetadata(r io.Reader) (*ProviderMetadata, error) { var pmd ProviderMetadata - dec := json.NewDecoder(r) - if err := dec.Decode(&pmd); err != nil { + if err := misc.StrictJSONParse(r, &pmd); err != nil { return nil, err } diff --git a/csaf/providermetaloader.go b/csaf/providermetaloader.go index 72412b3..6f08eb7 100644 --- a/csaf/providermetaloader.go +++ b/csaf/providermetaloader.go @@ -11,13 +11,13 @@ package csaf import ( "bytes" "crypto/sha256" - "encoding/json" "fmt" "io" "log/slog" "net/http" "strings" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -33,7 +33,7 @@ type ProviderMetadataLoader struct { type ProviderMetadataLoadMessageType int const ( - //JSONDecodingFailed indicates problems with JSON decoding + // JSONDecodingFailed indicates problems with JSON decoding JSONDecodingFailed ProviderMetadataLoadMessageType = iota // SchemaValidationFailed indicates a general problem with schema validation. SchemaValidationFailed @@ -149,7 +149,6 @@ func (pmdl *ProviderMetadataLoader) Enumerate(domain string) []*LoadedProviderMe } dnsURL := "https://csaf.data.security." + domain return []*LoadedProviderMetadata{pmdl.loadFromURL(dnsURL)} - } // Load loads one valid provider metadata for a given path. @@ -323,7 +322,7 @@ func (pmdl *ProviderMetadataLoader) loadFromURL(path string) *LoadedProviderMeta var doc any - if err := json.NewDecoder(tee).Decode(&doc); err != nil { + if err := misc.StrictJSONParse(tee, &doc); err != nil { result.Messages.Add( JSONDecodingFailed, fmt.Sprintf("JSON decoding failed: %v", err)) diff --git a/csaf/remotevalidation.go b/csaf/remotevalidation.go index 9e99b6f..97d612e 100644 --- a/csaf/remotevalidation.go +++ b/csaf/remotevalidation.go @@ -18,6 +18,7 @@ import ( "net/http" "sync" + "github.com/gocsaf/csaf/v3/internal/misc" bolt "go.etcd.io/bbolt" ) @@ -180,7 +181,6 @@ func prepareCache(config string) (cache, error) { return create() } return nil - }); err != nil { db.Close() return nil, err @@ -256,7 +256,7 @@ func deserialize(value []byte) (*RemoteValidationResult, error) { } defer r.Close() var rvr RemoteValidationResult - if err := json.NewDecoder(r).Decode(&rvr); err != nil { + if err := misc.StrictJSONParse(r, &rvr); err != nil { return nil, err } return &rvr, nil @@ -323,7 +323,7 @@ func (v *remoteValidator) Validate(doc any) (*RemoteValidationResult, error) { // no cache -> process directly. in = resp.Body } - return json.NewDecoder(in).Decode(&rvr) + return misc.StrictJSONParse(in, &rvr) }(); err != nil { return nil, err } diff --git a/csaf/rolie.go b/csaf/rolie.go index b94cfa3..d3a5ac7 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -14,6 +14,7 @@ import ( "sort" "time" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -54,7 +55,7 @@ type ROLIEServiceDocument struct { // LoadROLIEServiceDocument loads a ROLIE service document from a reader. 
func LoadROLIEServiceDocument(r io.Reader) (*ROLIEServiceDocument, error) { var rsd ROLIEServiceDocument - if err := json.NewDecoder(r).Decode(&rsd); err != nil { + if err := misc.StrictJSONParse(r, &rsd); err != nil { return nil, err } return &rsd, nil @@ -122,7 +123,7 @@ func (rcd *ROLIECategoryDocument) Merge(categories ...string) bool { // LoadROLIECategoryDocument loads a ROLIE category document from a reader. func LoadROLIECategoryDocument(r io.Reader) (*ROLIECategoryDocument, error) { var rcd ROLIECategoryDocument - if err := json.NewDecoder(r).Decode(&rcd); err != nil { + if err := misc.StrictJSONParse(r, &rcd); err != nil { return nil, err } return &rcd, nil @@ -195,9 +196,8 @@ type ROLIEFeed struct { // LoadROLIEFeed loads a ROLIE feed from a reader. func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) { - dec := json.NewDecoder(r) var rf ROLIEFeed - if err := dec.Decode(&rf); err != nil { + if err := misc.StrictJSONParse(r, &rf); err != nil { return nil, err } return &rf, nil diff --git a/internal/misc/json.go b/internal/misc/json.go new file mode 100644 index 0000000..0bb2ec0 --- /dev/null +++ b/internal/misc/json.go @@ -0,0 +1,34 @@ +// This file is Free Software under the Apache-2.0 License +// without warranty, see README.md and LICENSES/Apache-2.0.txt for details. +// +// SPDX-License-Identifier: Apache-2.0 +// +// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) +// Software-Engineering: 2023 Intevation GmbH + +package misc + +import ( + "encoding/json" + "fmt" + "io" +) + +// StrictJSONParse provides JSON parsing with stronger validation. +func StrictJSONParse(jsonData io.Reader, target interface{}) error { + decoder := json.NewDecoder(jsonData) + + decoder.DisallowUnknownFields() + + err := decoder.Decode(target) + if err != nil { + return fmt.Errorf("strictJSONParse: %w", err) + } + + token, err := decoder.Token() + if err != io.EOF { + return fmt.Errorf("strictJSONParse: unexpected trailing data after JSON: token: %v, err: %v", token, err) + } + + return nil +} From e7c08d05cd78ee31a2547acc6b8bfcd85d4aaf04 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Thu, 3 Jul 2025 10:58:32 +0200 Subject: [PATCH 197/235] Rewrite function from scratch --- internal/misc/json.go | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/internal/misc/json.go b/internal/misc/json.go index 0bb2ec0..c30323d 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -3,8 +3,8 @@ // // SPDX-License-Identifier: Apache-2.0 // -// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) -// Software-Engineering: 2023 Intevation GmbH +// SPDX-FileCopyrightText: 2025 German Federal Office for Information Security (BSI) +// Software-Engineering: 2025 Intevation GmbH package misc @@ -14,20 +14,23 @@ import ( "io" ) -// StrictJSONParse provides JSON parsing with stronger validation. 
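To make the behaviour of StrictJSONParse above concrete, here is a self-contained sketch of the same two checks — unknown fields and trailing data after the JSON value are both rejected. The sample struct and inputs are invented for illustration.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strings"
)

// strictParse mirrors the helper above: disallow unknown fields and require
// that nothing but EOF follows the decoded JSON value.
func strictParse(r io.Reader, target any) error {
	dec := json.NewDecoder(r)
	dec.DisallowUnknownFields()
	if err := dec.Decode(target); err != nil {
		return err
	}
	if _, err := dec.Token(); !errors.Is(err, io.EOF) {
		return errors.New("unexpected trailing data after JSON document")
	}
	return nil
}

func main() {
	type doc struct {
		Title string `json:"title"`
	}
	var d doc
	fmt.Println(strictParse(strings.NewReader(`{"title":"t","extra":1}`), &d)) // unknown field -> error
	fmt.Println(strictParse(strings.NewReader(`{"title":"t"} garbage`), &d))   // trailing data -> error
	fmt.Println(strictParse(strings.NewReader(`{"title":"t"}`), &d))           // <nil>
}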
+// StrictJSONParse creates a JSON decoder that decodes an interface +// while not allowing unknown fields nor trailing data func StrictJSONParse(jsonData io.Reader, target interface{}) error { decoder := json.NewDecoder(jsonData) - + // Don't allow unknown fields decoder.DisallowUnknownFields() - err := decoder.Decode(target) - if err != nil { - return fmt.Errorf("strictJSONParse: %w", err) + if err := decoder.Decode(target); err != nil { + return fmt.Errorf("JSON decoding error: %w", err) } - token, err := decoder.Token() - if err != io.EOF { - return fmt.Errorf("strictJSONParse: unexpected trailing data after JSON: token: %v, err: %v", token, err) + // Check for any trailing data after the main JSON structure + if _, err := decoder.Token(); err != io.EOF { + if err != nil { + return fmt.Errorf("error reading trailing data: %w", err) + } + return fmt.Errorf("unexpected trailing data after JSON object") } return nil From c81f55a752b33236d1b35f980baedaaaa04dea32 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 4 Jul 2025 15:29:03 +0200 Subject: [PATCH 198/235] Add LoadAdvisory tests --- csaf/advisory_test.go | 50 +++++ internal/misc/json.go | 2 +- .../avendor-advisory-0004.json | 171 ++++++++++++++++++ .../unknown-fields/avendor-advisory-0004.json | 171 ++++++++++++++++++ .../valid/avendor-advisory-0004.json | 170 +++++++++++++++++ 5 files changed, 563 insertions(+), 1 deletion(-) create mode 100644 csaf/advisory_test.go create mode 100644 testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json create mode 100644 testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json create mode 100644 testdata/csaf-documents/valid/avendor-advisory-0004.json diff --git a/csaf/advisory_test.go b/csaf/advisory_test.go new file mode 100644 index 0000000..062a713 --- /dev/null +++ b/csaf/advisory_test.go @@ -0,0 +1,50 @@ +package csaf + +import ( + "os" + "path/filepath" + "testing" +) + +func TestLoadAdvisory(t *testing.T) { + type args struct { + jsonDir string + } + tests := []struct { + name string + args args + wantErr bool + }{{ + name: "Valid documents", + args: args{jsonDir: "csaf-documents/valid"}, + wantErr: false, + }, + { + name: "Unknown fields", + args: args{jsonDir: "csaf-documents/unknown-fields"}, + wantErr: true, + }, + { + name: "Garbage trailing data", + args: args{jsonDir: "csaf-documents/trailing-garbage-data"}, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := filepath.Walk("../testdata/"+tt.args.jsonDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.Mode().IsRegular() && filepath.Ext(info.Name()) == ".json" { + if _, err := LoadAdvisory(path); (err != nil) != tt.wantErr { + t.Errorf("LoadAdvisory() error = %v, wantErr %v", err, tt.wantErr) + } + } + return nil + }); err != nil { + t.Fatal(err) + } + }) + } +} diff --git a/internal/misc/json.go b/internal/misc/json.go index c30323d..653c166 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -16,7 +16,7 @@ import ( // StrictJSONParse creates a JSON decoder that decodes an interface // while not allowing unknown fields nor trailing data -func StrictJSONParse(jsonData io.Reader, target interface{}) error { +func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) // Don't allow unknown fields decoder.DisallowUnknownFields() diff --git a/testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json 
b/testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json new file mode 100644 index 0000000..2131136 --- /dev/null +++ b/testdata/csaf-documents/trailing-garbage-data/avendor-advisory-0004.json @@ -0,0 +1,171 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} +invalid data diff --git a/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json b/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json new file mode 100644 index 0000000..17321ae --- /dev/null +++ b/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json @@ -0,0 +1,171 @@ +{ + "document": { + 
"unknown-field": false, + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} diff --git a/testdata/csaf-documents/valid/avendor-advisory-0004.json b/testdata/csaf-documents/valid/avendor-advisory-0004.json new file mode 100644 index 0000000..0e194e9 --- /dev/null +++ b/testdata/csaf-documents/valid/avendor-advisory-0004.json @@ -0,0 +1,170 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF 
document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool", + "version": "0.3.2" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} From 7935818600ee70cbcb7784a67788a4f3bacaba01 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Mon, 7 Jul 2025 11:41:49 +0200 Subject: [PATCH 199/235] Fix: Allow unknown fields: They are not forbidden --- internal/misc/json.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/internal/misc/json.go b/internal/misc/json.go index 653c166..4ecc6a5 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -18,8 +18,6 @@ import ( // while not allowing unknown fields nor trailing data func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) - // Don't allow unknown fields - decoder.DisallowUnknownFields() if err := decoder.Decode(target); err != nil { return fmt.Errorf("JSON decoding error: %w", err) From 4b4d6ed5943c5bf0e953e22454a4da55302b5a15 Mon Sep 17 00:00:00 2001 From: 
JanHoefelmeyer Date: Mon, 7 Jul 2025 11:45:36 +0200 Subject: [PATCH 200/235] Remove uknown field tests --- csaf/advisory_test.go | 5 - .../unknown-fields/avendor-advisory-0004.json | 171 ------------------ 2 files changed, 176 deletions(-) delete mode 100644 testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json diff --git a/csaf/advisory_test.go b/csaf/advisory_test.go index 062a713..9a82884 100644 --- a/csaf/advisory_test.go +++ b/csaf/advisory_test.go @@ -19,11 +19,6 @@ func TestLoadAdvisory(t *testing.T) { args: args{jsonDir: "csaf-documents/valid"}, wantErr: false, }, - { - name: "Unknown fields", - args: args{jsonDir: "csaf-documents/unknown-fields"}, - wantErr: true, - }, { name: "Garbage trailing data", args: args{jsonDir: "csaf-documents/trailing-garbage-data"}, diff --git a/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json b/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json deleted file mode 100644 index 17321ae..0000000 --- a/testdata/csaf-documents/unknown-fields/avendor-advisory-0004.json +++ /dev/null @@ -1,171 +0,0 @@ -{ - "document": { - "unknown-field": false, - "category": "csaf_vex", - "csaf_version": "2.0", - "distribution": { - "tlp": { - "label": "WHITE", - "url": "https://www.first.org/tlp/v1/" - } - }, - "notes": [ - { - "category": "summary", - "title": "Test document summary", - "text": "Auto generated test CSAF document" - } - ], - "publisher": { - "category": "vendor", - "name": "ACME Inc.", - "namespace": "https://www.example.com" - }, - "title": "Test CSAF document", - "tracking": { - "current_release_date": "2020-01-01T00:00:00Z", - "generator": { - "date": "2020-01-01T00:00:00Z", - "engine": { - "name": "csaf-tool", - "version": "0.3.2" - } - }, - "id": "Avendor-advisory-0004", - "initial_release_date": "2020-01-01T00:00:00Z", - "revision_history": [ - { - "date": "2020-01-01T00:00:00Z", - "number": "1", - "summary": "Initial version" - } - ], - "status": "final", - "version": "1" - } - }, - "product_tree": { - "branches": [ - { - "category": "vendor", - "name": "AVendor", - "branches": [ - { - "category": "product_name", - "name": "product_1", - "branches": [ - { - "category": "product_version", - "name": "1.1", - "product": { - "name": "AVendor product_1 1.1", - "product_id": "CSAFPID_0001" - } - }, - { - "category": "product_version", - "name": "1.2", - "product": { - "name": "AVendor product_1 1.2", - "product_id": "CSAFPID_0002" - } - }, - { - "category": "product_version", - "name": "2.0", - "product": { - "name": "AVendor product_1 2.0", - "product_id": "CSAFPID_0003" - } - } - ] - } - ] - }, - { - "category": "vendor", - "name": "AVendor1", - "branches": [ - { - "category": "product_name", - "name": "product_2", - "branches": [ - { - "category": "product_version", - "name": "1", - "product": { - "name": "AVendor1 product_2 1", - "product_id": "CSAFPID_0004" - } - } - ] - } - ] - }, - { - "category": "vendor", - "name": "AVendor", - "branches": [ - { - "category": "product_name", - "name": "product_3", - "branches": [ - { - "category": "product_version", - "name": "2022H2", - "product": { - "name": "AVendor product_3 2022H2", - "product_id": "CSAFPID_0005" - } - } - ] - } - ] - } - ] - }, - "vulnerabilities": [ - { - "cve": "CVE-2020-1234", - "notes": [ - { - "category": "description", - "title": "CVE description", - "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" - } - ], - "product_status": { - "under_investigation": ["CSAFPID_0001"] - }, - "threats": [ - { - "category": "impact", - "details": 
"Customers should upgrade to the latest version of the product", - "date": "2020-01-01T00:00:00Z", - "product_ids": ["CSAFPID_0001"] - } - ] - }, - { - "cve": "CVE-2020-9876", - "notes": [ - { - "category": "description", - "title": "CVE description", - "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" - } - ], - "product_status": { - "under_investigation": ["CSAFPID_0001"] - }, - "threats": [ - { - "category": "impact", - "details": "Still under investigation", - "date": "2020-01-01T00:00:00Z", - "product_ids": ["CSAFPID_0001"] - } - ] - } - ] -} From 230e9f2d2ba50706ab90f1eb7739d00a42d9e335 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 31 Jul 2025 11:29:44 +0200 Subject: [PATCH 201/235] fix minor docs typo --- docs/scripts/Readme.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/scripts/Readme.md b/docs/scripts/Readme.md index 77e8dae..e0bc7c9 100644 --- a/docs/scripts/Readme.md +++ b/docs/scripts/Readme.md @@ -1,5 +1,5 @@ Scripts for assisting the Integration tests. -They were written on Ubuntu 20.04 TLS amd64 and also tested with 24.04 TLS. +They were written on Ubuntu 20.04 LTS amd64 and also tested with 24.04 LTS. - `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf integration tests on a naked Ubuntu LTS amd64. @@ -8,9 +8,9 @@ and configures nginx for serving TLS connections. - `TLSClientConfigsForITest.sh` generates client certificates by calling `createCCForITest.sh` which uses the root certificate initialized before with `createRootCAForITest.sh`. It configures nginx to enable the authentication with client certificate. (This assumes that the same folder name is used to create the root certificate) -- `setupProviderForITest.sh` builds the csaf_provider, writes the required nginx configurations and create the initial folders. IT calls `uploadToProvider.sh` to upload some csaf example files to the provider. +- `setupProviderForITest.sh` builds the `csaf_provider`, writes the required nginx configurations and create the initial folders. IT calls `uploadToProvider.sh` to upload some csaf example files to the provider. -As creating the folders needs to authenticate with the csaf_provider, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` +As creating the folders needs to authenticate with the `csaf_provider`, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh` Calling example (as user with sudo privileges): ``` bash From 7f27a63e3c42d9647fe4cf6af56f2a9ca5316c7c Mon Sep 17 00:00:00 2001 From: Sebastian Wagner Date: Fri, 1 Aug 2025 11:42:52 +0200 Subject: [PATCH 202/235] docs provider-setup.md: Fix create URL in curl command --- docs/provider-setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/provider-setup.md b/docs/provider-setup.md index 2fdf1e3..d54268f 100644 --- a/docs/provider-setup.md +++ b/docs/provider-setup.md @@ -144,7 +144,7 @@ on a GNU/Linux operating system. 
Create the folders: ```(shell) -curl https://192.168.56.102/cgi-bin/csaf_provider.go/create --cert-type p12 --cert {clientCertificat.p12} +curl https://192.168.56.102/cgi-bin/csaf_provider.go/api/create --cert-type p12 --cert {clientCertificat.p12} ``` Replace {clientCertificate.p12} with the client certificate file in pkcs12 format which includes the corresponding key as well. From 7fc5600521bd624c159d06f3e2a0d50c94390472 Mon Sep 17 00:00:00 2001 From: koplas Date: Fri, 1 Aug 2025 10:55:10 +0200 Subject: [PATCH 203/235] Fix #669 Return error when the create request failed. --- cmd/csaf_uploader/processor.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_uploader/processor.go b/cmd/csaf_uploader/processor.go index 104e1ef..b3e00ce 100644 --- a/cmd/csaf_uploader/processor.go +++ b/cmd/csaf_uploader/processor.go @@ -81,8 +81,9 @@ func (p *processor) create() error { } defer resp.Body.Close() + var createError error if resp.StatusCode != http.StatusOK { - log.Printf("Create failed: %s\n", resp.Status) + createError = fmt.Errorf("create failed: %s", resp.Status) } var result struct { @@ -100,7 +101,7 @@ func (p *processor) create() error { writeStrings("Errors:", result.Errors) - return nil + return createError } // uploadRequest creates the request for uploading a csaf document by passing the filename. From 100e4d395bc64c9ba9ddb658a11bc31d7eb6cf71 Mon Sep 17 00:00:00 2001 From: koplas Date: Tue, 26 Aug 2025 11:49:38 +0200 Subject: [PATCH 204/235] Fix csaf checker listed check Correctly handle URLs that are absolute. --- cmd/csaf_checker/links.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/cmd/csaf_checker/links.go b/cmd/csaf_checker/links.go index c7aec57..4eed5f9 100644 --- a/cmd/csaf_checker/links.go +++ b/cmd/csaf_checker/links.go @@ -9,11 +9,12 @@ package main import ( - "github.com/gocsaf/csaf/v3/internal/misc" "io" "net/http" "net/url" + "github.com/gocsaf/csaf/v3/internal/misc" + "github.com/PuerkitoBio/goquery" "github.com/gocsaf/csaf/v3/util" @@ -94,7 +95,12 @@ func (pgs pages) listed( return err } // Links may be relative - abs := misc.JoinURL(baseURL, u).String() + var abs string + if u.IsAbs() { + abs = u.String() + } else { + abs = misc.JoinURL(baseURL, u).String() + } content.links.Add(abs) return nil }) From 108e5f8620a265571e237108ec2a0ae4f257d428 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Tue, 26 Aug 2025 15:24:51 +0200 Subject: [PATCH 205/235] improve docs/csaf_downloader.md (minor) time_range --- docs/csaf_downloader.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index d71b546..74c9e2c 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -104,8 +104,9 @@ ignorepattern = [".*white.*", ".*red.*"] #### Timerange option -The `timerange` parameter enables downloading advisories which last changes falls -into a given intervall. There are three possible notations: +The `time_range` parameter enables downloading advisories +which last changes falls into a given intervall. +There are three possible notations: 1. Relative. 
If the given string follows the rules of a [Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration), From 7ab964a3e3d5509f6dcb96e300b8e65b3aa71ff9 Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Mon, 1 Sep 2025 11:48:56 +0200 Subject: [PATCH 206/235] Doc: Highlight the reason for the rate options existence (#662) * Doc: Highlight the reason for the rate options existence * Fix typos --- docs/csaf_aggregator.md | 6 ++++++ docs/csaf_checker.md | 7 +++++++ docs/csaf_downloader.md | 6 ++++++ 3 files changed, 19 insertions(+) diff --git a/docs/csaf_aggregator.md b/docs/csaf_aggregator.md index 661871c..04efa3a 100644 --- a/docs/csaf_aggregator.md +++ b/docs/csaf_aggregator.md @@ -247,3 +247,9 @@ insecure = true In case you want to provide CSAF advisories from others that only qualify as CSAF publishers, see [how to use the `csaf_aggregator` as "CSAF proxy provider"](proxy-provider-for-aggregator.md). + +Some providers may limit the rate of requests that may be sent to retrieve advisories. +This may cause issues with the aggregator. +In this case, the --rate option can be used to adjust the requests per second +sent by each worker of the aggregator to an acceptable rate. +(The rate that is considered acceptable depends on the provider.) diff --git a/docs/csaf_checker.md b/docs/csaf_checker.md index 5152501..5c812bd 100644 --- a/docs/csaf_checker.md +++ b/docs/csaf_checker.md @@ -78,6 +78,13 @@ The option `timerange` allows to only check advisories from a given time interval. It can only be given once. See the [downloader documentation](csaf_downloader.md#timerange-option) for details. +Some providers may limit the rate of requests that may be sent to retrieve advisories. +This may cause the checker to be unable to retrieve all advisories. In this case, +the --rate option can be used to adjust the requests per second +sent by the checker to an acceptable rate. +(The rate that is considered acceptable depends on the provider.) + + You can ignore certain advisories while checking by specifying a list of regular expressions[^1] to match their URLs by using the `ignorepattern` option. diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index d71b546..9168f3c 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -51,6 +51,12 @@ to download more advisories at once. This may improve the overall speed of the d However, since this also increases the load on the servers, their administrators could have taken countermeasures to limit this. +For example, some providers may limit the rate of requests that may be sent to retrieve advisories. +This may cause the downloader to be unable to retrieve all advisories. +In this case, the --rate option can be used to adjust the requests per second +sent by the downloader to an acceptable rate. +(The rate that is considered acceptable depends on the provider.) 
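For illustration, a minimal invocation that caps the request rate could look like the following. This is only a sketch: `--rate` takes requests per second as described above, and the domain argument is assumed to be passed positionally; adjust to your actual setup.

```
csaf_downloader --rate 2 example.com
```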
+ If no config file is explictly given the follwing places are searched for a config file: ``` From 1f1a2a4cbc4654942e83d8a8794303b4d17a557e Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 1 Sep 2025 12:04:17 +0200 Subject: [PATCH 207/235] Add arm64 builds for windows and linux (#663) --- Makefile | 47 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index f399bf5..8f164fc 100644 --- a/Makefile +++ b/Makefile @@ -12,15 +12,15 @@ SHELL = /bin/bash BUILD = go build MKDIR = mkdir -p -.PHONY: build build_linux build_win build_mac_amd64 build_mac_arm64 tag_checked_out mostlyclean +.PHONY: build build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 tag_checked_out mostlyclean all: - @echo choose a target from: build build_linux build_win build_mac_amd64 build_mac_arm64 mostlyclean + @echo choose a target from: build build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 mostlyclean @echo prepend \`make BUILDTAG=1\` to checkout the highest git tag before building @echo or set BUILDTAG to a specific tag # Build all binaries -build: build_linux build_win build_mac_amd64 build_mac_arm64 +build: build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 # if BUILDTAG == 1 set it to the highest git tag ifeq ($(strip $(BUILDTAG)),1) @@ -29,7 +29,7 @@ endif ifdef BUILDTAG # add the git tag checkout to the requirements of our build targets -build_linux build_win build_mac_amd64 build_mac_arm64: tag_checked_out +build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64: tag_checked_out endif tag_checked_out: @@ -69,31 +69,49 @@ LDFLAGS = -ldflags "-X github.com/gocsaf/csaf/v3/util.SemVersion=$(SEMVER)" # Build binaries and place them under bin-$(GOOS)-$(GOARCH) # Using 'Target-specific Variable Values' to specify the build target system -GOARCH = amd64 -build_linux: GOOS = linux -build_win: GOOS = windows -build_mac_amd64: GOOS = darwin +build_linux: GOOS=linux +build_linux: GOARCH=amd64 -build_mac_arm64: GOARCH = arm64 -build_mac_arm64: GOOS = darwin +build_win: GOOS=windows +build_win: GOARCH=amd64 -build_linux build_win build_mac_amd64 build_mac_arm64: +build_mac_amd64: GOOS=darwin +build_mac_amd64: GOARCH=amd64 + +build_mac_arm64: GOOS=darwin +build_mac_arm64: GOARCH=arm64 + +build_linux_arm64: GOOS=linux +build_linux_arm64: GOARCH=arm64 + +build_win_arm64: GOOS=windows +build_win_arm64: GOARCH=arm64 + +build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64: $(eval BINDIR = bin-$(GOOS)-$(GOARCH)/ ) $(MKDIR) $(BINDIR) env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/... 
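As a usage sketch, the new architecture-specific targets can be invoked individually; assuming a working Go toolchain, for example:

```
make build_linux_arm64
make build_win_arm64
```

This places the binaries under `bin-linux-arm64/` and `bin-windows-arm64/`, following the `bin-$(GOOS)-$(GOARCH)` convention used above.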
DISTDIR := csaf-$(SEMVER) -dist: build_linux build_win build_mac_amd64 build_mac_arm64 +dist: build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 mkdir -p dist mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64 + mkdir -p dist/$(DISTDIR)-windows-arm64/bin-windows-arm64 cp README.md dist/$(DISTDIR)-windows-amd64 + cp README.md dist/$(DISTDIR)-windows-arm64 cp bin-windows-amd64/csaf_uploader.exe bin-windows-amd64/csaf_validator.exe \ bin-windows-amd64/csaf_checker.exe bin-windows-amd64/csaf_downloader.exe \ dist/$(DISTDIR)-windows-amd64/bin-windows-amd64/ + cp bin-windows-arm64/csaf_uploader.exe bin-windows-arm64/csaf_validator.exe \ + bin-windows-arm64/csaf_checker.exe bin-windows-arm64/csaf_downloader.exe \ + dist/$(DISTDIR)-windows-arm64/bin-windows-arm64/ mkdir -p dist/$(DISTDIR)-windows-amd64/docs + mkdir -p dist/$(DISTDIR)-windows-arm64/docs cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \ docs/csaf_downloader.md dist/$(DISTDIR)-windows-amd64/docs + cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \ + docs/csaf_downloader.md dist/$(DISTDIR)-windows-arm64/docs mkdir -p dist/$(DISTDIR)-macos/bin-darwin-amd64 \ dist/$(DISTDIR)-macos/bin-darwin-arm64 \ dist/$(DISTDIR)-macos/docs @@ -103,15 +121,20 @@ dist: build_linux build_win build_mac_amd64 build_mac_arm64 cp docs/$${f}.md dist/$(DISTDIR)-macos/docs ; \ done mkdir dist/$(DISTDIR)-gnulinux-amd64 + mkdir dist/$(DISTDIR)-gnulinux-arm64 cp -r README.md bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64 + cp -r README.md bin-linux-arm64 dist/$(DISTDIR)-gnulinux-arm64 # adjust which docs to copy mkdir -p dist/tmp_docs cp -r docs/examples dist/tmp_docs cp docs/*.md dist/tmp_docs cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-amd64/docs + cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-arm64/docs rm -rf dist/tmp_docs cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/ + cd dist/ ; zip -r $(DISTDIR)-windows-arm64.zip $(DISTDIR)-windows-arm64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-amd64.tar.gz $(DISTDIR)-gnulinux-amd64/ + cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-arm64.tar.gz $(DISTDIR)-gnulinux-arm64/ cd dist/ ; tar -cvmlzf $(DISTDIR)-macos.tar.gz $(DISTDIR)-macos # Remove bin-*-* and dist directories From f6927154bf7517adcc6afef29e1244dbbc604647 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 1 Sep 2025 15:40:26 +0200 Subject: [PATCH 208/235] improve calculated version numbers (#651) for modified git workspaces a `-modified` is added to the semantic version in the makefile. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8f164fc..7bb8ef9 100644 --- a/Makefile +++ b/Makefile @@ -47,7 +47,7 @@ tag_checked_out: # In this case we might in some situations see an error like # `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")` # which can be ignored. -GITDESC := $(shell git describe --tags --always 2>/dev/null || true) +GITDESC := $(shell git describe --tags --always --dirty=-modified 2>/dev/null || true) CURRENT_FOLDER_NAME := $(notdir $(CURDIR)) ifeq ($(strip $(GITDESC)),) SEMVER := $(CURRENT_FOLDER_NAME) From 1a2a8fae9c23cce626be07f7d8d6888823ae507c Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 1 Sep 2025 15:40:42 +0200 Subject: [PATCH 209/235] improve docs (minor) for csaf_provider (#668) * add a "both" to explain the config file option `certificate_and_password` better. 
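As a sketch of what the documented option looks like when enabled in the provider's TOML configuration (the `true` value here is purely illustrative):

```
# Require both a password and a valid client certificate for write access.
certificate_and_password = true
```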
--- docs/csaf_provider.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/csaf_provider.md b/docs/csaf_provider.md index cb27f9f..2fc5354 100644 --- a/docs/csaf_provider.md +++ b/docs/csaf_provider.md @@ -58,7 +58,8 @@ The following example file documents all available configuration options: # The following shows an example of a manually set prefix: #canonical_url_prefix = "https://localhost" -# Require users to use a password and a valid Client Certificate for write access. +# Require users to use both +# (1) a password and (2) a valid Client Certificate for write access. #certificate_and_password = false # Allow the user to send the request without having to send a passphrase From 187d1146311159fbb7f91d5019ee4d6eb479ff16 Mon Sep 17 00:00:00 2001 From: Paul Schwabauer Date: Mon, 1 Sep 2025 16:13:57 +0200 Subject: [PATCH 210/235] Remove unnecessary URL joins (#676) This should avoid bugs for more complex scenarios. --- cmd/csaf_aggregator/mirror.go | 5 ++-- cmd/csaf_checker/processor.go | 47 ++++++++++++------------------- cmd/csaf_checker/roliecheck.go | 18 +++--------- cmd/csaf_downloader/downloader.go | 13 +++------ csaf/advisories.go | 28 +++++++++--------- 5 files changed, 41 insertions(+), 70 deletions(-) diff --git a/cmd/csaf_aggregator/mirror.go b/cmd/csaf_aggregator/mirror.go index a013553..9653ea9 100644 --- a/cmd/csaf_aggregator/mirror.go +++ b/cmd/csaf_aggregator/mirror.go @@ -67,17 +67,16 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) { // Collecting the categories per label. w.categories = map[string]util.Set[string]{} - base, err := url.Parse(w.loc) + pmdURL, err := url.Parse(w.loc) if err != nil { return nil, err } - base.Path = "" afp := csaf.NewAdvisoryFileProcessor( w.client, w.expr, w.metadataProvider, - base) + pmdURL) afp.AgeAccept = w.provider.ageAccept(w.processor.cfg) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 18ef49e..6e780ca 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -628,14 +628,9 @@ var yearFromURL = regexp.MustCompile(`.*/(\d{4})/[^/]+$`) // mistakes, from conforming filenames to invalid advisories. func (p *processor) integrity( files []csaf.AdvisoryFile, - base string, mask whereType, lg func(MessageType, string, ...any), ) error { - b, err := url.Parse(base) - if err != nil { - return err - } client := p.httpClient() var data bytes.Buffer @@ -647,7 +642,7 @@ func (p *processor) integrity( continue } - u := misc.JoinURL(b, fp).String() + u := fp.String() // Should this URL be ignored? 
if p.cfg.ignoreURL(u) { @@ -779,7 +774,7 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", x.url(), err) continue } - hashFile := misc.JoinURL(b, hu).String() + hashFile := hu.String() p.checkTLS(hashFile) if res, err = client.Get(hashFile); err != nil { @@ -828,7 +823,7 @@ func (p *processor) integrity( lg(ErrorType, "Bad URL %s: %v", f.SignURL(), err) continue } - sigFile := misc.JoinURL(b, su).String() + sigFile := su.String() p.checkTLS(sigFile) p.badSignatures.use() @@ -948,12 +943,13 @@ func (p *processor) checkIndex(base string, mask whereType) error { scanner := bufio.NewScanner(res.Body) for line := 1; scanner.Scan(); line++ { u := scanner.Text() - if _, err := url.Parse(u); err != nil { + up, err := url.Parse(u) + if err != nil { p.badIntegrities.error("index.txt contains invalid URL %q in line %d", u, line) continue } - files = append(files, csaf.DirectoryAdvisoryFile{Path: u}) + files = append(files, csaf.DirectoryAdvisoryFile{Path: misc.JoinURL(bu, up).String()}) } return files, scanner.Err() }() @@ -968,7 +964,7 @@ func (p *processor) checkIndex(base string, mask whereType) error { // Block rolie checks. p.labelChecker.feedLabel = "" - return p.integrity(files, base, mask, p.badIndices.add) + return p.integrity(files, mask, p.badIndices.add) } // checkChanges fetches the "changes.csv" and calls the "checkTLS" method for HTTPs checks. @@ -1035,8 +1031,13 @@ func (p *processor) checkChanges(base string, mask whereType) error { } path := r[pathColumn] + pathURL, err := url.Parse(path) + if err != nil { + return nil, nil, err + } + times, files = append(times, t), - append(files, csaf.DirectoryAdvisoryFile{Path: path}) + append(files, csaf.DirectoryAdvisoryFile{Path: misc.JoinURL(bu, pathURL).String()}) p.timesChanges[path] = t } return times, files, nil @@ -1063,7 +1064,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { // Block rolie checks. p.labelChecker.feedLabel = "" - return p.integrity(files, base, mask, p.badChanges.add) + return p.integrity(files, mask, p.badChanges.add) } // empty checks if list of strings contains at least one none empty string. 
@@ -1364,18 +1365,11 @@ func (p *processor) checkSecurityFolder(folder string) string { } // Try to load - up, err := url.Parse(u) + _, err = url.Parse(u) if err != nil { return fmt.Sprintf("CSAF URL '%s' invalid: %v", u, err) } - base, err := url.Parse(folder) - if err != nil { - return err.Error() - } - base.Path = "" - - u = misc.JoinURL(base, up).String() p.checkTLS(u) if res, err = client.Get(u); err != nil { return fmt.Sprintf("Cannot fetch %s from security.txt: %v", u, err) @@ -1523,12 +1517,6 @@ func (p *processor) checkPGPKeys(_ string) error { client := p.httpClient() - base, err := url.Parse(p.pmdURL) - if err != nil { - return err - } - base.Path = "" - for i := range keys { key := &keys[i] if key.URL == nil { @@ -1541,10 +1529,11 @@ func (p *processor) checkPGPKeys(_ string) error { continue } - u := misc.JoinURL(base, up).String() + // Todo: refactor all methods to directly accept *url.URL + u := up.String() p.checkTLS(u) - res, err := client.Get(u) + res, err := client.Get(*key.URL) if err != nil { p.badPGPs.error("Fetching public OpenPGP key %s failed: %v.", u, err) continue diff --git a/cmd/csaf_checker/roliecheck.go b/cmd/csaf_checker/roliecheck.go index ace4d0d..f510992 100644 --- a/cmd/csaf_checker/roliecheck.go +++ b/cmd/csaf_checker/roliecheck.go @@ -10,7 +10,6 @@ package main import ( "errors" - "github.com/gocsaf/csaf/v3/internal/misc" "net/http" "net/url" "sort" @@ -217,12 +216,6 @@ func defaults[T any](p *T, def T) T { // processROLIEFeeds goes through all ROLIE feeds and checks their // integrity and completeness. func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { - - base, err := url.Parse(p.pmdURL) - if err != nil { - return err - } - base.Path = "" p.badROLIEFeed.use() advisories := map[*csaf.Feed][]csaf.AdvisoryFile{} @@ -234,12 +227,11 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { if feed.URL == nil { continue } - up, err := url.Parse(string(*feed.URL)) + feedBase, err := url.Parse(string(*feed.URL)) if err != nil { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedBase := misc.JoinURL(base, up) feedURL := feedBase.String() p.checkTLS(feedURL) @@ -266,13 +258,12 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - up, err := url.Parse(string(*feed.URL)) + feedURL, err := url.Parse(string(*feed.URL)) if err != nil { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedURL := misc.JoinURL(base, up) feedBase, err := util.BaseURL(feedURL) if err != nil { p.badProviderMetadata.error("Bad base path: %v", err) @@ -292,7 +283,7 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { // TODO: Issue a warning if we want check AMBER+ without an // authorizing client. 
- if err := p.integrity(files, base.String(), rolieMask, p.badProviderMetadata.add); err != nil { + if err := p.integrity(files, rolieMask, p.badProviderMetadata.add); err != nil { if err != errContinue { return err } @@ -321,13 +312,12 @@ func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error { continue } - up, err := url.Parse(string(*feed.URL)) + feedBase, err := url.Parse(string(*feed.URL)) if err != nil { p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err) continue } - feedBase := misc.JoinURL(base, up) makeAbs := makeAbsolute(feedBase) label := defaults(feed.TLPLabel, csaf.TLPLabelUnlabeled) diff --git a/cmd/csaf_downloader/downloader.go b/cmd/csaf_downloader/downloader.go index 4890593..4edd724 100644 --- a/cmd/csaf_downloader/downloader.go +++ b/cmd/csaf_downloader/downloader.go @@ -226,18 +226,16 @@ func (d *downloader) download(ctx context.Context, domain string) error { } } - base, err := url.Parse(lpmd.URL) + pmdURL, err := url.Parse(lpmd.URL) if err != nil { return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err) } - base.Path = "" expr := util.NewPathEval() if err := d.loadOpenPGPKeys( client, lpmd.Document, - base, expr, ); err != nil { return err @@ -247,7 +245,7 @@ func (d *downloader) download(ctx context.Context, domain string) error { client, expr, lpmd.Document, - base) + pmdURL) // Do we need time range based filtering? if d.cfg.Range != nil { @@ -312,7 +310,6 @@ allFiles: func (d *downloader) loadOpenPGPKeys( client util.Client, doc any, - base *url.URL, expr *util.PathEval, ) error { src, err := expr.Eval("$.public_openpgp_keys", doc) @@ -337,7 +334,7 @@ func (d *downloader) loadOpenPGPKeys( if key.URL == nil { continue } - up, err := url.Parse(*key.URL) + u, err := url.Parse(*key.URL) if err != nil { slog.Warn("Invalid URL", "url", *key.URL, @@ -345,9 +342,7 @@ func (d *downloader) loadOpenPGPKeys( continue } - u := base.JoinPath(up.Path).String() - - res, err := client.Get(u) + res, err := client.Get(u.String()) if err != nil { slog.Warn( "Fetching public OpenPGP key failed", diff --git a/csaf/advisories.go b/csaf/advisories.go index c5e4fea..33dfa03 100644 --- a/csaf/advisories.go +++ b/csaf/advisories.go @@ -12,7 +12,6 @@ import ( "context" "encoding/csv" "fmt" - "github.com/gocsaf/csaf/v3/internal/misc" "io" "log/slog" "net/http" @@ -20,6 +19,7 @@ import ( "strings" "time" + "github.com/gocsaf/csaf/v3/internal/misc" "github.com/gocsaf/csaf/v3/util" ) @@ -96,7 +96,7 @@ type AdvisoryFileProcessor struct { client util.Client expr *util.PathEval doc any - base *url.URL + pmdURL *url.URL } // NewAdvisoryFileProcessor constructs a filename extractor @@ -105,13 +105,13 @@ func NewAdvisoryFileProcessor( client util.Client, expr *util.PathEval, doc any, - base *url.URL, + pmdURL *url.URL, ) *AdvisoryFileProcessor { return &AdvisoryFileProcessor{ client: client, expr: expr, doc: doc, - base: base, + pmdURL: pmdURL, } } @@ -180,7 +180,7 @@ func (afp *AdvisoryFileProcessor) Process( // Not found -> fall back to PMD url if empty(dirURLs) { - baseURL, err := util.BaseURL(afp.base) + baseURL, err := util.BaseURL(afp.pmdURL) if err != nil { return err } @@ -262,8 +262,13 @@ func (afp *AdvisoryFileProcessor) loadChanges( continue } + pathURL, err := url.Parse(path) + if err != nil { + return nil, err + } + files = append(files, - DirectoryAdvisoryFile{Path: base.JoinPath(path).String()}) + DirectoryAdvisoryFile{Path: misc.JoinURL(base, pathURL).String()}) } return files, nil } @@ -277,12 +282,11 @@ func (afp *AdvisoryFileProcessor) 
processROLIE( if feed.URL == nil { continue } - up, err := url.Parse(string(*feed.URL)) + feedURL, err := url.Parse(string(*feed.URL)) if err != nil { slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err) continue } - feedURL := misc.JoinURL(afp.base, up) slog.Info("Got feed URL", "feed", feedURL) fb, err := util.BaseURL(feedURL) @@ -290,12 +294,6 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid feed base URL", "url", fb, "err", err) continue } - feedBaseURL, err := url.Parse(fb) - if err != nil { - slog.Error("Cannot parse feed base URL", "url", fb, "err", err) - continue - } - feedBaseURL.Path = "" res, err := afp.client.Get(feedURL.String()) if err != nil { @@ -327,7 +325,7 @@ func (afp *AdvisoryFileProcessor) processROLIE( slog.Error("Invalid URL", "url", u, "err", err) return "" } - return misc.JoinURL(feedBaseURL, p).String() + return p.String() } rfeed.Entries(func(entry *Entry) { From d1f33ab27dc55948822bdaa5b69c324863258f1f Mon Sep 17 00:00:00 2001 From: Marius Goetze Date: Mon, 8 Sep 2025 13:10:50 +0200 Subject: [PATCH 211/235] fix incorrect usage of formatted string output probably unchanged, but now `go vet` is happy that formatted strings are not misused --- cmd/csaf_checker/processor.go | 42 ++++++++++++++++---------------- internal/models/models_test.go | 8 +++--- internal/options/options_test.go | 6 ++--- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index 6e780ca..e427b44 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -536,7 +536,7 @@ func (p *processor) rolieFeedEntries(feed string) ([]csaf.AdvisoryFile, error) { if len(errors) > 0 { p.badProviderMetadata.error("%s: Validating against JSON schema failed:", feed) for _, msg := range errors { - p.badProviderMetadata.error(strings.ReplaceAll(msg, `%`, `%%`)) + p.badProviderMetadata.error("%s", strings.ReplaceAll(msg, `%`, `%%`)) } } @@ -736,7 +736,7 @@ func (p *processor) integrity( switch date, fault := p.extractTime(doc, `initial_release_date`, u); { case fault != "": - p.badFolders.error(fault) + p.badFolders.error("%s", fault) case folderYear == nil: p.badFolders.error("No year folder found in %s", u) case date.UTC().Year() != *folderYear: @@ -744,7 +744,7 @@ func (p *processor) integrity( } current, fault := p.extractTime(doc, `current_release_date`, u) if fault != "" { - p.badChanges.error(fault) + p.badChanges.error("%s", fault) } else { p.timesAdv[f.URL()] = current } @@ -814,7 +814,7 @@ func (p *processor) integrity( msgType = InfoType } for _, fetchError := range hashFetchErrors { - p.badIntegrities.add(msgType, fetchError) + p.badIntegrities.add(msgType, "%s", fetchError) } // Check signature @@ -1052,7 +1052,7 @@ func (p *processor) checkChanges(base string, mask whereType) error { if p.cfg.Range != nil { filtered = " (maybe filtered out by time interval)" } - p.badChanges.warn("no entries in changes.csv found" + filtered) + p.badChanges.warn("%s", "no entries in changes.csv found"+filtered) } if !sort.SliceIsSorted(times, func(i, j int) bool { @@ -1300,8 +1300,8 @@ func (p *processor) checkProviderMetadata(domain string) bool { for i := range lpmd.Messages { p.badProviderMetadata.warn( - "Unexpected situation while loading provider-metadata.json: " + - lpmd.Messages[i].Message) + "Unexpected situation while loading provider-metadata.json: %s", + lpmd.Messages[i].Message) } if !lpmd.Valid() { @@ -1401,25 +1401,25 @@ func (p *processor) checkDNS(domain string) { res, 
err := client.Get(path) if err != nil { p.badDNSPath.add(ErrorType, - fmt.Sprintf("Fetching %s failed: %v", path, err)) + "Fetching %s failed: %v", path, err) return } if res.StatusCode != http.StatusOK { - p.badDNSPath.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status)) + p.badDNSPath.add(ErrorType, "Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status) } hash := sha256.New() defer res.Body.Close() content, err := io.ReadAll(res.Body) if err != nil { p.badDNSPath.add(ErrorType, - fmt.Sprintf("Error while reading the response from %s", path)) + "Error while reading the response from %s", path) } hash.Write(content) if !bytes.Equal(hash.Sum(nil), p.pmd256) { p.badDNSPath.add(ErrorType, - fmt.Sprintf("%s does not serve the same provider-metadata.json as previously found", - path)) + "%s does not serve the same provider-metadata.json as previously found", + path) } } @@ -1433,12 +1433,12 @@ func (p *processor) checkWellknown(domain string) { res, err := client.Get(path) if err != nil { p.badWellknownMetadata.add(ErrorType, - fmt.Sprintf("Fetching %s failed: %v", path, err)) + "Fetching %s failed: %v", path, err) return } if res.StatusCode != http.StatusOK { - p.badWellknownMetadata.add(ErrorType, fmt.Sprintf("Fetching %s failed. Status code %d (%s)", - path, res.StatusCode, res.Status)) + p.badWellknownMetadata.add(ErrorType, "Fetching %s failed. Status code %d (%s)", + path, res.StatusCode, res.Status) } } @@ -1475,13 +1475,13 @@ func (p *processor) checkWellknownSecurityDNS(domain string) error { // but found in the legacy location, and inform about finding it there (2). switch warnings { case 0: - p.badSecurity.add(InfoType, sDMessage) + p.badSecurity.add(InfoType, "%s", sDMessage) case 1: - p.badSecurity.add(ErrorType, sDMessage) - p.badSecurity.add(ErrorType, sLMessage) + p.badSecurity.add(ErrorType, "%s", sDMessage) + p.badSecurity.add(ErrorType, "%s", sLMessage) case 2: - p.badSecurity.add(WarnType, sDMessage) - p.badSecurity.add(InfoType, sLMessage) + p.badSecurity.add(WarnType, "%s", sDMessage) + p.badSecurity.add(InfoType, "%s", sLMessage) } p.checkDNS(domain) diff --git a/internal/models/models_test.go b/internal/models/models_test.go index 777a428..48cd02d 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -81,7 +81,7 @@ func TestUnmarshalText(t *testing.T) { byteSlice := []byte{'3', 'h'} var emptySlice []byte if testTimeRange.UnmarshalText(byteSlice) != nil { - t.Errorf(testTimeRange.UnmarshalText(byteSlice).Error()) + t.Error(testTimeRange.UnmarshalText(byteSlice).Error()) } if testTimeRange.UnmarshalText(emptySlice) == nil { t.Errorf("Failure: UnmarshalText succeeded on invalid slice of bytes.") @@ -104,10 +104,10 @@ func TestUnmarshalFlag(t *testing.T) { time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC), time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC)) if err := testTimeRange.UnmarshalFlag("3h"); err != nil { - t.Errorf(err.Error()) + t.Error(err.Error()) } if err := testTimeRange.UnmarshalFlag("2006-01-02T15:04:05"); err != nil { - t.Errorf(err.Error()) + t.Error(err.Error()) } if err := testTimeRange.UnmarshalFlag("2006-01-02T15:04:05a"); err == nil { t.Errorf("Failure: Extracted time from invalid string") @@ -119,7 +119,7 @@ func TestUnmarshalFlag(t *testing.T) { t.Errorf("Failure: Extracted time from invalid string") } if err := testTimeRange.UnmarshalFlag("2006-01-02T15:04:05, 2007-01-02T15:04:05"); err != nil { - t.Errorf(err.Error()) + 
t.Error(err.Error()) } } diff --git a/internal/options/options_test.go b/internal/options/options_test.go index 2768e37..6e96838 100644 --- a/internal/options/options_test.go +++ b/internal/options/options_test.go @@ -90,7 +90,7 @@ func TestParse(t *testing.T) { cmd.Env = append(os.Environ(), "TEST_HELP=1") err := cmd.Run() if err != nil { - t.Fatalf(err.Error()) + t.Fatal(err.Error()) } // test the version flag @@ -104,7 +104,7 @@ func TestParse(t *testing.T) { cmd.Env = append(os.Environ(), "TEST_VERSION=1") err = cmd.Run() if err != nil { - t.Fatalf(err.Error()) + t.Fatal(err.Error()) } } @@ -140,7 +140,7 @@ func TestLoadToml(t *testing.T) { t.Errorf("Failure: Succeeded in parsing nonexistant parameter") } if err := loadTOML(&cfg, "data/config.toml"); err != nil { - t.Errorf(err.Error()) + t.Error(err.Error()) } } From 5c1b0612551662490b90f11512e1c09fdec64447 Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Fri, 12 Sep 2025 11:38:56 +0200 Subject: [PATCH 212/235] Rename workflow go_legacy to "Go Test (oldstable)" so it is distinct from the other "Go" workflow --- .github/workflows/go_legacy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/go_legacy.yml b/.github/workflows/go_legacy.yml index a86368d..fda6413 100644 --- a/.github/workflows/go_legacy.yml +++ b/.github/workflows/go_legacy.yml @@ -1,4 +1,4 @@ -name: Go +name: Go Test (oldstable) on: push: From bcb7c8be10c662216edc310854a2fa12631f00cb Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Fri, 12 Sep 2025 11:41:13 +0200 Subject: [PATCH 213/235] rename go_legacy.yml -> go-oldstable.yml --- .github/workflows/{go_legacy.yml => go-oldstable.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{go_legacy.yml => go-oldstable.yml} (100%) diff --git a/.github/workflows/go_legacy.yml b/.github/workflows/go-oldstable.yml similarity index 100% rename from .github/workflows/go_legacy.yml rename to .github/workflows/go-oldstable.yml From 0dbf822cbdcaa3737f12d21f82a3f1fb59fc085c Mon Sep 17 00:00:00 2001 From: mgoetzegb Date: Mon, 15 Sep 2025 12:42:30 +0200 Subject: [PATCH 214/235] fix doc comment: remove untrue claim of disallowing unknown fields (#677) adjust comment to fit https://github.com/gocsaf/csaf/pull/655/commits/7935818600ee70cbcb7784a67788a4f3bacaba01 --- internal/misc/json.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/misc/json.go b/internal/misc/json.go index 4ecc6a5..d9e87c3 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -15,7 +15,7 @@ import ( ) // StrictJSONParse creates a JSON decoder that decodes an interface -// while not allowing unknown fields nor trailing data +// while not allowing trailing data func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) From 05eae0a9ae6f945f946dd815ec7463da5ba0a7de Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer <107021473+JanHoefelmeyer@users.noreply.github.com> Date: Wed, 1 Oct 2025 11:14:09 +0200 Subject: [PATCH 215/235] Re-add unknown fields check (#681) --- internal/misc/json.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/internal/misc/json.go b/internal/misc/json.go index d9e87c3..2888302 100644 --- a/internal/misc/json.go +++ b/internal/misc/json.go @@ -18,6 +18,8 @@ import ( // while not allowing trailing data func StrictJSONParse(jsonData io.Reader, target any) error { decoder := json.NewDecoder(jsonData) + // Don't allow unknown fields + decoder.DisallowUnknownFields() if err := decoder.Decode(target); 
err != nil { return fmt.Errorf("JSON decoding error: %w", err) From c6bad42c24b6262ebc07fd48a2d622be9162088a Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Wed, 22 Oct 2025 16:57:00 +0200 Subject: [PATCH 216/235] Improve LoadCertificate unit test (#692) * fix `LoadCertificate` unit test replaced certificate with invalid dns name, which is rejected by stdlib of Go version >=1.25.2. Change in Go introduced by https://github.com/golang/go/issues/75715 * code review: add script to generate certificates, remove `greenbone` org entry * code review: add license header * rework cert creation and fix one filename --------- Co-authored-by: Marius Goetze --- internal/certs/certs_test.go | 10 +- internal/certs/createTestCerts.sh | 60 +++++ internal/certs/data/cert.crt | 61 +++-- internal/certs/data/private.pem | 42 ++++ internal/certs/data/privated.pem | 42 ---- internal/certs/data/testclient.crt | 49 ++-- internal/certs/data/testclientkey.pem | 308 +++++++++++++------------- 7 files changed, 311 insertions(+), 261 deletions(-) create mode 100755 internal/certs/createTestCerts.sh create mode 100644 internal/certs/data/private.pem delete mode 100644 internal/certs/data/privated.pem diff --git a/internal/certs/certs_test.go b/internal/certs/certs_test.go index e2f1af5..5bd7025 100644 --- a/internal/certs/certs_test.go +++ b/internal/certs/certs_test.go @@ -20,13 +20,13 @@ func TestLoadCertificates(t *testing.T) { passphrase = "qwer" missingCert = "data/testclientcert_missing.crt" missingTestkey = "data/testclientkey_missing.pem" - privateKey = "data/privated.pem" + privateKey = "data/private.pem" privateCert = "data/cert.crt" ) // Try to load cert that is not protected, expect success. if cert, err := LoadCertificate(&testCert, &testKey, nil); cert == nil || err != nil { - t.Errorf("Failure: Couldn't load supposedly valid certificate.") + t.Errorf("Failure: Couldn't load supposedly valid certificate. Got error: %v", err) } // Try to load no cert, expect error. if cert, err := LoadCertificate(nil, &testKey, nil); cert != nil || err == nil { @@ -46,7 +46,7 @@ func TestLoadCertificates(t *testing.T) { } // Try to load encrypted cert, expecting success. if cert, err := LoadCertificate(&privateCert, &privateKey, &passphrase); cert == nil || err != nil { - t.Errorf("Failure: Couldn't load supposedly valid encrypted certificate.") + t.Errorf("Failure: Couldn't load supposedly valid encrypted certificate. Got error: %v", err) } // Try to load wrong encrypted cert, expecting error. if cert, err := LoadCertificate(&testKey, &privateKey, &passphrase); cert != nil || err == nil { @@ -56,8 +56,8 @@ func TestLoadCertificates(t *testing.T) { if cert, err := LoadCertificate(&missingCert, &privateKey, &passphrase); cert != nil || err == nil { t.Errorf("Failure: No Failure while loading nonexistens certificate.") } - // Try to load nonexistent encrypted cert, expecting error. + // Try to load nonexistent encrypted cert, expecting success. if cert, err := LoadCertificate(nil, nil, nil); cert != nil || err != nil { - t.Errorf("Failure: Expected nil return.") + t.Errorf("Failure: Expected nil return. 
Got error: %v", err) } } diff --git a/internal/certs/createTestCerts.sh b/internal/certs/createTestCerts.sh new file mode 100755 index 0000000..084677e --- /dev/null +++ b/internal/certs/createTestCerts.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: 2025 German Federal Office for Information Security (BSI) +# Software-Engineering: 2025 Intevation GmbH + +# cab be used to generated the certificates for the go tests +# as the resulting files are in the repository, this script does not +# need to be run each time, its purpose is to document how the keys and +# certs were created + +set -e + +certtool --generate-privkey --outfile testserver-key.pem + + +echo ' +organization = "CSAF" +unit = "CSAF Distribution" +country = "DE" +cn = "csaf.test" + +dns_name = "csaf.test" +dns_name = "localhost" +dns_name = "*.csaf.test" +ip_address = "127.0.0.1" +ip_address = "::1" + +tls_www_server +tls_www_client +ocsp_signing_key +encryption_key +signing_key +expiration_days = 36500 +' > gnutls-certtool.testserver.template + +certtool --generate-self-signed --load-privkey testserver-key.pem --outfile cert.crt --template gnutls-certtool.testserver.template --stdout | head -1 + +# for testing legacy code path, we use openssl's traditional mode to +# create a password protected variant after RFC 1423 that still can be read +# by https://pkg.go.dev/crypto/x509#DecryptPEMBlock. Citation: +# Legacy PEM encryption as specified in RFC 1423 is insecure by design. +# Since it does not authenticate the ciphertext, it is vulnerable +# to padding oracle attacks that can let an attacker recover the plaintext. +openssl rsa -in testserver-key.pem -out private.pem -aes256 -passout pass:qwer -traditional + +echo ' +organization = "CSAF Tools Development (internal)" +country = "DE" +cn = "Tester" + +tls_www_client +encryption_key +signing_key + +expiration_days = 36500 +' > gnutls-certtool.testclientkey.template + +certtool --generate-privkey --bits 3072 --outfile testclientkey.pem +certtool --generate-self-signed --load-privkey testclientkey.pem --template gnutls-certtool.testclientkey.template --outfile testclient.crt diff --git a/internal/certs/data/cert.crt b/internal/certs/data/cert.crt index f80d61c..f814e3e 100644 --- a/internal/certs/data/cert.crt +++ b/internal/certs/data/cert.crt @@ -1,37 +1,28 @@ -----BEGIN CERTIFICATE----- -MIIGajCCBNKgAwIBAgIUGNi4GgCUssOOe3k0VuHf3R0+d54wDQYJKoZIhvcNAQEL -BQAwgY0xFDASBgNVBAMTC0NvbW1vbiBuYW1lMRMwEQYDVQQLEwppbnRldmF0aW9u -MRMwEQYDVQQKEwppbnRldmF0aW9uMRMwEQYDVQQHEwppbnRldmF0aW9uMRUwEwYD -VQQIEwxMb3dlciBTYXhvbnkxCzAJBgNVBAYTAkdFMRIwEAYKCZImiZPyLGQBGRYC -REMwHhcNMjMwOTE5MDcwMDA1WhcNMjYwNjE0MDcwMDA3WjCB8DEQMA4GA1UEAxMH -cmVxdWVzdDETMBEGA1UECxMKaW50ZXZhdGlvbjETMBEGA1UEChMKaW50ZXZhdGlv -bjETMBEGA1UEBxMKb3NuYWJydWVjazEVMBMGA1UECBMMbG93ZXIgc2F4b255MQsw -CQYDVQQGEwJHRTESMBAGCgmSJomT8ixkARkWAkRDMREwDwYKCZImiZPyLGQBGRYB -LjERMA8GCgmSJomT8ixkARkWAS4xETAPBgoJkiaJk/IsZAEZFgEuMRMwEQYKCZIm -iZPyLGQBGRYDd3d3MRcwFQYKCZImiZPyLGQBARMHbm8gaWRlYTCCAaIwDQYJKoZI -hvcNAQEBBQADggGPADCCAYoCggGBAN0vZbLXtRzd61rR8Hos0BGnqCaJXIwGARwx -JojMyxASFT+KeC4QDRkgRrK6OY4k/i7TEHuUGk/Bm754++554wmmhDqv1Q4+VhhR -1K/JAz/HVZNTAR1rPKwG82lyEpPxlRNZg/QtF9DqQSoSkL/fJLs+rq4zlKozXzRE -auZ5Be8So1dXRZfMVUMDgtk+IX8+iCeZisiWfv62ttQ0EiuiXLagd6ruEuoCSVi2 -tVswsC/Hp8AI2Ro56mmHiWthuae1H8yDWUFLSe9AQW65qC/xVUgo/nMpK2BYVFKb -70TMjl/dZM0Qn1tdiNyqCkbIhXjklZvZYhO+15TPkgDXDsqRUjpTrLZXLGrD6XIx -CRLZGY6YrUfsFTjUC6JrUrAR8zY7SLsYN5sUmFUSMpJnI+T/SD4p/0CXrKrbMOjW 
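The script above documents how the encrypted fixture data/private.pem is produced: OpenSSL's traditional (RFC 1423) mode keeps it readable by the deprecated crypto/x509 helpers mentioned in the script comment. A rough sketch of that legacy read path, assuming the fixture path and the test passphrase "qwer" from certs_test.go; the project's actual loader is LoadCertificate in internal/certs:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
)

func main() {
	raw, err := os.ReadFile("internal/certs/data/private.pem") // assumed fixture path
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(raw)
	if block == nil || !x509.IsEncryptedPEMBlock(block) {
		panic("expected an RFC 1423 encrypted PEM block")
	}
	der, err := x509.DecryptPEMBlock(block, []byte("qwer")) // passphrase used in the unit test
	if err != nil {
		panic(err)
	}
	key, err := x509.ParsePKCS1PrivateKey(der)
	if err != nil {
		panic(err)
	}
	fmt.Printf("decrypted %d-bit RSA test key\n", key.N.BitLen())
}
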
-Qqz6FX/WHPxvswGKHk5zHYGHrzx7OKmfVa6gzUgZSfOHj2xOOR2Un9DwNavIrmSC -WYXKZqig5qDyfzBvlXWEio/5GrDwgQIDAQABo4IBWzCCAVcwgcIGA1UdEQSBujCB -t4IrYSBkbnNOYW1lIG9mIHRoZSBzdWJqZWN0IG9mIHRoZSBjZXJ0aWZpY2F0ZYI3 -YW4gYWRkaXRpb25hbCBkbnNOYW1lIG9mIHRoZSBzdWJqZWN0IG9mIHRoZSBjZXJ0 -aWZpY2F0ZYIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIBLoIP -c2Vjb25kIGFkZGl0aW9ugg50aGlyZCBhZGRpdGlvboIHZG5zTmFtZTAMBgNVHRMB -Af8EAjAAMDEGA1UdJQQqMCgGCCsGAQUFBwMJBggrBgEFBQcDAgYIKwYBBQUHAwEG -CCsGAQUFBwMCMA8GA1UdDwEB/wQFAwMHsAAwHQYDVR0OBBYEFKrFhODjTKCopb+W -Qa29PsHR4HXgMB8GA1UdIwQYMBaAFCyZxCa1ZUHVy8LjikE8zumAiEgfMA0GCSqG -SIb3DQEBCwUAA4IBgQBTrAgh6d+qiLumEfmkNCmhewxKxDZp+Ni2nz9XRzNO2cQE -U0n8MdbnQInW3xJXng2sAcl1fQz0RN1hkyjDwi69mbbPgcTYmxJFvyt+zRYBe/Sq -4CGGkxEdPW94tMpQ6SrCn2mAMnvcq9A1pYBVYyPeUsaRHC5OUBFOMCevNy8JwNyY -MJ0H5HQCyCysbzA1d521pogGUs/tmbE+ym9zpV8vG0b6De1PexjVeGkTNYz6NCR2 -VZTQ+OJ5iE5pHPEC1Qif44LrR9Kdn/wu3RjTYyHeBOJFjK+DKgleNF4QVTcZQIPE -snN4H+/VSgTZQ3kgWbtpd1m5oRBJovEc2Qe+l+iDFCk8OA4z/x+fkvOeD3NUAl7D -9Pt3cP3UtWUJp4NJn2dvUljmQhB02HSqdNBhqKSg4/cf7l8Zo1ejvBUosrlgw3C3 -apDaC4/xk7woFKVYW25teH2ze+Gpz/YsLDtmL7Bri8CGVsqsN9yqO8SstwKBa3Rt -xQ2em6XnnanApT4iFX4= +MIIE2DCCA0CgAwIBAgIUT/9u6/HtTciy3NB6UGXu+U+UzT8wDQYJKoZIhvcNAQEL +BQAwTDELMAkGA1UEBhMCREUxDTALBgNVBAoTBENTQUYxGjAYBgNVBAsTEUNTQUYg +RGlzdHJpYnV0aW9uMRIwEAYDVQQDEwljc2FmLnRlc3QwIBcNMjUxMDE3MTAyMjM1 +WhgPMjEyNTA5MjMxMDIyMzVaMEwxCzAJBgNVBAYTAkRFMQ0wCwYDVQQKEwRDU0FG +MRowGAYDVQQLExFDU0FGIERpc3RyaWJ1dGlvbjESMBAGA1UEAxMJY3NhZi50ZXN0 +MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAwqJ45WlBG5CqW3Meewsf +Es1tqQRsHS/L6Hlz/aTZQHte/Co18qklnza0ZvK0mbPsQ8HLKXfU6Am5yw3u6vZj +XNfhWDW4QtsSk9f/y/fBADw17qYinoVyLpqZU5Z6kFRY5npY0C9bCtsAZd4qimx5 +yu/MhM8LHI9K2oKPSkFgRCTRKAo9sZ97o4wZmTxJIasOr0SPpmfMLs2sHSEqcK4d +/RxZ+OtYtd3pmE/WjxtSozCkdAccvrH+TSAuF3+/6oBiov8yX0KPNEBiiwuDXMUD +QWkjfcrxQZAswMWRo55JJYBbIjrinW8vldLooFo5trNEE2nukgRPhvLhiJdKKAeg ++A8jM/Bx7JgjRCPppIEmWdvXg+CS6L0hGj49pg3OcIiNNoufoXPRkFqmRh72n1Oj +2RC13W8H3C3SDYz20mqJhkbci+05vO/LgKj9te8xEs/xa4xCtv7ycuB2Etzf1cWS +zfz5LGXwwLI0rjpx3OAsr5i8Fukxe5maYLS9AUCTetTnAgMBAAGjga8wgawwDAYD +VR0TAQH/BAIwADAnBgNVHSUEIDAeBggrBgEFBQcDAgYIKwYBBQUHAwEGCCsGAQUF +BwMJMEQGA1UdEQQ9MDuCCWNzYWYudGVzdIIJbG9jYWxob3N0ggsqLmNzYWYudGVz +dIcEfwAAAYcQAAAAAAAAAAAAAAAAAAAAATAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0O +BBYEFN2InaQvsu6hULCYeKc6pdE4VgVHMA0GCSqGSIb3DQEBCwUAA4IBgQBjPdXd +2xHzce3mi4RlANT4nOSdpELhl54xeJDgI9Evt70N8B4uTmOI5+F6JVICE25cnDs1 +c9SoHpWzh1ZuzfiBYa/cdQNUtaTfgHLi5GYtV1DzmKXVRUciBiNBWWxYMbTGvTOO +i3r6DEgOYuukeL4qj//EGOcTJEarHVSxPMuXTD/PoP/VpIdqRS9drEpFUC6lecZc +UJtUPAcyx0oD2vNmPmulDfYFMLLOPrIeNa0g7os4wgUl7+9wR1cPPRTXY0fW6Hoi +j+a8Qn80Q3PrOuEO/SZ4aHHpOk90bRqofyIhFjPwS0YN5w/Sn23uq1u2Dx+Zy+5K +6Cs9p5dJWu5/zU4ZdbQlpYIHXQVbido1TY92Z84skEsac2wVh7L2LMB3p3Gu9WYn +oKqFYCw5FICvRgyh1KG8QWhW59Em0Jxr8rTw6qyBQACdixKy6/1ok2ArMivTC8Gd +rEbefshgc6dnAZCAp1MjCU+tg9iYEymSSLdOtUKvHEIosUGO1p5ol0hReTQ= -----END CERTIFICATE----- diff --git a/internal/certs/data/private.pem b/internal/certs/data/private.pem new file mode 100644 index 0000000..483283c --- /dev/null +++ b/internal/certs/data/private.pem @@ -0,0 +1,42 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-256-CBC,3ACC7169D177F0159193ACAF3B3997A3 + +DjxOUO2dbAAeHUtP2jSW/7zpVTWeRaJi5Kce74r1TB2DJ4FYI8361ZZcOrjISGQJ +33f1Ic+8gv3P5ORzGAIfxzSmwQLk5y45da7of2dj69FXba+WoGNKgMS/KMmj+CvR +XylNJl4RE5zovePkPvk2JDvyjg+POMMu3UTOoxJzSTmifV6F7msuFTHMHhs3edSs +PUAHprSW7Qh5dYq3VK8tuqg9qdy3uLajpZkg9b9bBfaiku+SiRfwsdCjeAuubiJK +ctyPQclE5B1jEgJit6odjzsLENB9uCzkgq61UPoxbT6URZ0jJwhEZgh15UAr74QP 
+KAElD8Q7V2Z0w31vPhBcMIyrSaNlMr5p4teNFlMEZRa0lhNOXp7AY0DwBtioX2bR +VCxFTk409L/gVaweUnS0jzY0cj/pU1L1I5OWScDjCRkkj0Vk40S/zcy5esz85b5r +rGRxdRKqJIIZeb3r7WdvINFnNXL/KL/hxVruZcZse8cV3Na+w4rH+AHElMd51tZ+ +RKEBDqH0jlg3aelfAWXkV96pUtH/4lTSZ1+huQyHLUjTULll7L6BtxNGzY071buS +0CaTFyRcaipKYkXQjmrA49uTWQzrEgqiRZ4exh/gAaM/tEgVRfo/49Xo5wrTsGr0 +4Q0hBnUYAa+cVL7K8z2WAk1qerb1CsmiyjQZFI1S6z10ugS6zTDdB/kwW5ZvAzWB +/DXc9rJlgTFLbZK7Oty/IDayYkWD3BjfOV94oMeogK0eworAMxhvfIFkPxRHwhIp +9KfBw7xsa2gJECbi8BvrsV69PHn6EHmphn7NMpc8A3KmBFv1uOqWu9P7ef67+e+U +JprzVt2mUDoTUayzVkwQPy3rm5wWxVanHqtRXig3RN3pnreEv1AdfTKLfCxE2jvo +9fh6hNo3urgIL1KFXHjiXVRt03RGfpWfAI3JKqhkWOqZ7rVT19AuJ6On2J1dVMkm +TFelKdX97YlvMfNdKp1pkzOjZ2f4ehL5WCkMq88VgDrTmZv+CfcnrRslsLP6MSpX +scAMFDdkzSBUH3NyHxxkstcs5xQm1SuPN/omB7rpYgfhD6HwdgZNEAINtMNgIIoR +tW34hGkV6BhI+2y+pkIndm63JVikrbuLKiwTjwynFJWKTWgRBMR/BvJ1Bq/IfJNo +pC/hIpN95vUbHGzHRfmO9v5HiaAaBYGs59gL6WS0OlsyFXMr6a9ZmBDbZ7TD94Ax +IAhGhRE+5OpF/kWLfOriXMEbyY/oNoN1y7jdpMdmncq2/26/OhL8RFUKPlCbz0LN +5FUv7ouW8kvUgy5tGu78iPu6MNI+BzqLg+TrUu2bufajS+/VGAFo/2PX896n+2FJ +cP2DXlmFgC6udIeWsGNJI8Y50fC+YZxN+UthLOctiOgM4pGK1UDl8JQLbt0xRrJA +MI5XkbXJJYBdjHaqg8WGF260UgWhlD9sdJc7ntLX9S+3DoOboSwmYu4Y8p15e4Cg +8LHgW4NmnBFPX5/oyYMVCt7SWEnnwGEeebu+YgD9fbFAsag5TpE04zpx58rCW5bh +sJqRBCcZE5rqO9CUF1fYu0F24fv+E3LK9lujCMARVfJk8CLUg7VFL9dY2XWEfHsO +plZ0lmc5BntBoQ5r+xK/6TbK5nn1Fo+JPRjnDaE++QdVx9ZVjtT+a/wCD5NJr13k +dByZ3eCz5+mZUBGD1PWh5C+iyL3Wpq29b3EsHfSIMzOZsCpY2jkC8Jr90ADxhZcH +j8wFXHIWCe+Nn89Zim53gvbzumspRj8Yb08RATruqpvwj3M/K5K6P92Lt3uqt6UB +W+tAcChHNNWHFIT5CtCV/rltJYe2c9k9yG6BZJeLWPYgq90dFkIqbdkiz/pVpmKS +WMMzvkaK+LEcv+M9eMUQPdPYWhwv67wAlUsdLVWyQtxoYcLPUY3Io+Smn8eE+Qz7 +bxkSX+59QB3eCXrNGKTFsBiNDlxl+9YH6U9XhwIGyHlnBgN/79ts0ZutIpOibIWg +WPc9Cp5nkjjQl/4y8RSea8KSlkmM9YeTEo8cEL57XXOr1OO8UEPn/Ogoo5TI7JXL +jGh4evOcfWbiXZbn9kGshq1Kmv+lhN5IZ8QJY0s5Ze1eURnu0zlqKvFe3PxDxHV7 ++PaM8MneRkT5B8QgC7prh/yJ0KEI2MyIcYP73fw8cOLTXenw0bpmKLLfxu8mSx4M +VEDqeZJUb/XwsZTd9VT+42p4YT/6wRAe9eU3zA9wKh4Sr96vUGTPktXcpxCjoBre +3IaF/6aeyRQn91Ps9XmOc0/KSxZmHMxWv2btVc4oLHawnyRlLXXT7OSG4FFR7eE8 +IRoCCSip6YnIflp1v2n1f/07SzfKtrtVdiW1u2lbBJtwuzN/h8TtwRJan5bKWV/6 +-----END RSA PRIVATE KEY----- diff --git a/internal/certs/data/privated.pem b/internal/certs/data/privated.pem deleted file mode 100644 index 354598a..0000000 --- a/internal/certs/data/privated.pem +++ /dev/null @@ -1,42 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-CBC,054A583F6C90570F - -tlGw8qlO25FaQdRLkai5L1JHWz/5fC4zd3qFISWssYH2FEnz8yfYsCoRLivVYhJB -fswOTj9h5b1RYRsWfIwCGfyNeOj8hkQrLwCW607vbhydGGJ4xc5RBF9MK0QCjSNT -r8myedNyfI4nm5enNVFDqYsqAc7cA3m1qw+QsAhPOrASDTp5svHR7g9+T6P5GDHm -B79nap02kfmodC7ytmWDBEclJ45Y19LOucN0+Nl6JgKkQEfWB/p2s2kGAGY1Of3X -/ERPOqeqZdFSdPDyX+mrzjGVhypgjBaz7XRh8OSeW8UP70rE+9aZKn9fIs2NyYMH -wwCElUmFV1Ye+/JtE4+Rcu6pG7NrX1rAC+pqPZaF8PT/kEuawiwrMuU0RP/8Y6mn -PRZZGZhXwBcfWPDN+JIj7e1NAXynwP/d4Pc4nb1O6EG3/Yip+F9NNaNbEfS4z9eV -Se7Gr/ySwxFhww9KhMtFYhkb6DVzy7StXpDqDmLhaF+qGCl86XRzZHho6EwQi+9r -c3VXbgogbjwIP8OgAKIZLuMxETZb0rvOr87sMAiqWRx+gRhryNniNr70anY8Vkpl -jcw6SJdqWuvOGaKjxWgdcHOzHdISEu/W6z8euTzMxX6/C7hBrKT8Edt71Jha26a5 -ZZNDH2XoqDphelfCbrARhw4P++KcnhPsY2da5cJ4021dfwXQGbGjcW1EAR3tCP/U -NKWc8Wm4dzuQSMqJERbWlXL8/UuvtyJR8VgNueg8EAHXCWBCS9i1i06gla9gPbdy -erhMDtUsJepFPDZVuqvm0dIjBaldl+74FHnPQ6+qFHXy6f71bGOmbonspnApqoeP -gc4zB65Nv+ws//XfdgwHhmtUkWS2ANPNQhU9o92l8XlqKicGC72dEEsR2TMS7fEW -K9/d06ZGu83FEXL43OXN79JmkpblonCWRgyVF7WPGufm+dtmR5zlIQruW2FJVwPZ -QmOioJYlSopOztyyBIuhZaNwVDQgoFtwHKRWAUseodzmHuPpvWCBjlL4hebJ7O0T 
-HGHGddqam3IPmyradhk0o1Qb54uk9rrzKWjcOEw850mJt3DnkHRNRgY96Gg0fA+m -+UxEOuGPvOudOMtC32vDKwAZ9eGgxAKea/kvaLFdPqwiq3B+IBetjSYGZ2kxVOAD -K8rHH6bnzrrasKHfOIBpw4MsiAG19sW1fFL61v5OXTcLOEQ/UVC8WinSj3JK894O -XjETyg8zvH+bYdlv9T2SGvAAzv1bJ3Iw9kb2VK0ZgwfwQgKpCDe6PEFLP7K2NNdF -zSw1GHOiDewsMD7VSfkmtevhzTOcQd/3uoyn/5ftcvcbqI4CGxP6kOxmul3NdfYl -insi95+IuhkSUQL02AdkI3SQhSnfmFRZSsy6JTXSN/7XOOzRFyMJcR1WlXOKFpt9 -G/bYGjVmfxtRqH4ZO7irCPiM+ZudXvPCl5VhZReBsJeEJcNuR36QTJIL3RQHyKTD -9Z12PegrgPXDgkSns1s8phTu+GygIEh67yLPbPYohYYbJUOkab7Il3JauihnuMSP -2BDDbwdvL1V7TQCmnopNb1srZj3q/1eWKmik2U1kvc78c3W03NC5wFETic2QCM9z -u/IaKAjO/kvSB8+ClSYaZDVLuBgUHf0DSG9cb5eoPqFt3t4zuWQhQjJR1YlLtQsJ -YSQFf0WqGj6sA2+AIy6Fv3oitlOPtRi/2seZ8ACSqxbwUFf3to8ZA3rJNoaYLvsT -sz++DrA8oHr4eDOiCoLeU6MLNiUvB6RGtjDwhQDh2LoJJyAdh9wB3vaAmEJ1u3o4 -cGyTCxbbkxRCWhMWW4NJbvdZORYhhhIu+TH5DaLgsZS1n+UF/amKQ0m8sj968Uo/ -w05QBNm/F3zg5dpzyW7uEfti8DaP/apDcf1dHSpk9ERkJ/QSIdgzGmrROQvh2tF/ -nvubXXMAex0tXFS6eyIZVgkT1S5eF001DsxIlp/jY6oFUYHquMcOQkyRAvUTvLO1 -pkexrPYrmx/alP71nNrBfixSTHMuPVb2jC38ElzllgxHfaaI5Q1hef4lVaErNaQ3 -m1hvE7dYkNomTt9fu/LHaxtw/P1eBlL44QcfqdqL67ROES+fB27d8vbajm1EQraw -QUoY+NM5KeQyKeRPWxDVQwAv02Lof/FSiB01yNqrzmRojtTykKB5VrnIA1DDP2vI -SoZjPZOSIJHh3qlDaKxlGOQD9Wp4OtIPLqxpBmRgGcq2AVtm57jRAF634nTGvB+N -7fvMpBay3EZy3sauM4MZk7bytJKK6huQjmER+GM/F/Wyw28L7rewK8ukPKx8Wybc -ljVLrduRPt97JH4WWejy+k5vv4LHWJLsGGU474YHGMXF2VE3kJ3JKj8Wm5gS6p/p ------END RSA PRIVATE KEY----- diff --git a/internal/certs/data/testclient.crt b/internal/certs/data/testclient.crt index f46f386..6cfd9fa 100644 --- a/internal/certs/data/testclient.crt +++ b/internal/certs/data/testclient.crt @@ -1,27 +1,26 @@ -----BEGIN CERTIFICATE----- -MIIEkDCCAvigAwIBAgIBFDANBgkqhkiG9w0BAQsFADBKMQ8wDQYDVQQDEwZUZXN0 -ZXIxKjAoBgNVBAoTIUNTQUYgVG9vbHMgRGV2ZWxvcG1lbnQgKGludGVybmFsKTEL -MAkGA1UEBhMCREUwHhcNMjMwOTA0MDcyMjAzWhcNMjMxMDI0MDcyMjAzWjBVMRow -GAYDVQQDExFUTFMgVGVzdCBDbGllbnQgMTEqMCgGA1UEChMhQ1NBRiBUb29scyBE -ZXZlbG9wbWVudCAoaW50ZXJuYWwpMQswCQYDVQQGEwJERTCCAaIwDQYJKoZIhvcN -AQEBBQADggGPADCCAYoCggGBAN0vZbLXtRzd61rR8Hos0BGnqCaJXIwGARwxJojM -yxASFT+KeC4QDRkgRrK6OY4k/i7TEHuUGk/Bm754++554wmmhDqv1Q4+VhhR1K/J -Az/HVZNTAR1rPKwG82lyEpPxlRNZg/QtF9DqQSoSkL/fJLs+rq4zlKozXzREauZ5 -Be8So1dXRZfMVUMDgtk+IX8+iCeZisiWfv62ttQ0EiuiXLagd6ruEuoCSVi2tVsw -sC/Hp8AI2Ro56mmHiWthuae1H8yDWUFLSe9AQW65qC/xVUgo/nMpK2BYVFKb70TM -jl/dZM0Qn1tdiNyqCkbIhXjklZvZYhO+15TPkgDXDsqRUjpTrLZXLGrD6XIxCRLZ -GY6YrUfsFTjUC6JrUrAR8zY7SLsYN5sUmFUSMpJnI+T/SD4p/0CXrKrbMOjWQqz6 -FX/WHPxvswGKHk5zHYGHrzx7OKmfVa6gzUgZSfOHj2xOOR2Un9DwNavIrmSCWYXK -Zqig5qDyfzBvlXWEio/5GrDwgQIDAQABo3YwdDAMBgNVHRMBAf8EAjAAMBMGA1Ud -JQQMMAoGCCsGAQUFBwMCMA8GA1UdDwEB/wQFAwMHoAAwHQYDVR0OBBYEFKrFhODj -TKCopb+WQa29PsHR4HXgMB8GA1UdIwQYMBaAFI6GhktAq9L2uRChC9LcXeedKiUg -MA0GCSqGSIb3DQEBCwUAA4IBgQAbUDaIkmubooDde7BpZQx742BsPg4IN68bIg9A -3jI9codx9c8l9ROvZ/7FeRNXzhYrQUwzcKpwtQ1mB7kM85oXaTLxrtnkZAO2fFSb -8RA6QjOrnOvewWaO3moCZaPnN1wWtlnUev2tD7D2Tz/f20dE2wbDV0BGb8bU4eGI -UVgzYrMh0MHaC8LKoXUWP97jp/p+9CG4D2S1CmpzP2Nm1dS03oj4UHIUtamjivYY -vOeoKATXmj59lgYqqoAVbTH6f4mZlZGmzUhRxK6hck7xBdiXAwfta72m4WzE7HRh -nHAgO5aVWb6zltvVDJhYumB9Itv+LI7uU8fF9Uyc65SZ2BevxgikoDNxTx0oNr+4 -hExQhJfKuPFF2NI1N2tPYJT53Cek/ZJfjX3TyBneqehthtRqoAIIEaF/QlXqzJIi -G66YFC3xFlLmaQh52DJkF2+hzcPhFTVQv3yCirGLUSS9Nm7vTO2wnnW5arZazSV+ -enRZb3oiVYFVDh0Hymz9g5VraMw= +MIIEeDCCAuCgAwIBAgIUTqTcNqmr8Ou/MpL1AUnM/3gcoUkwDQYJKoZIhvcNAQEL +BQAwSjELMAkGA1UEBhMCREUxKjAoBgNVBAoTIUNTQUYgVG9vbHMgRGV2ZWxvcG1l +bnQgKGludGVybmFsKTEPMA0GA1UEAxMGVGVzdGVyMCAXDTI1MTAxNzEwMjIzNloY +DzIxMjUwOTIzMTAyMjM2WjBKMQswCQYDVQQGEwJERTEqMCgGA1UEChMhQ1NBRiBU 
+b29scyBEZXZlbG9wbWVudCAoaW50ZXJuYWwpMQ8wDQYDVQQDEwZUZXN0ZXIwggGi +MA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBN4fIBbwuGJXjXoa6F7e4Zzin +Yd9EB4nt5TkNoMkRgQe0JIJ+t1/lS/xlI7ATxNjUdybnYwCrEfDvy8XGwN6te+Xh +dz6HKDWPijW+ritQW9kouxJJSpna95L8SqU4tjdfyL/2X9E/7j3VYw1//zcmhLJg +1Os0+JHPcPuj1vmwLa1v7eGTCNlt0K8DbrlhPlteJB3hWolNIoVDjRemZFmqwUeV +GZ/XJos7OTB07p08yCOFhLl9jXCgEDDkKmcnAil3YhjudlEGSjdzFLskVD4xrtQ5 +GsbdJHyHhcUdgh+vqX2bFSklwdwVil1qIUEHnxpcRMaluZQ4u1tCgNhKNQHrJzVQ +n1aRVAYdX1PxfoIb5wt0+25MiVw8y8EcrMH97Ss26eNAtLeHZNrY9alqx/Cs8gOi +I8wA2Nga138tZuCJRXsDOnom9RrtdPLajhSb7n33Iq8ZDhYVGEIm2pc5MJxaI53V +e2WhmemFPfYwUAtzdGgwrBoY9MechdtNLGZqHxECAwEAAaNUMFIwDAYDVR0TAQH/ +BAIwADATBgNVHSUEDDAKBggrBgEFBQcDAjAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0O +BBYEFBVaa/ovyPpbk/8nlmuISWB2/t8xMA0GCSqGSIb3DQEBCwUAA4IBgQC8EeDx +qipM7bAFxkAOmvhNAjodKXRCWKhatD8HryeINOPgWajzLlHj/PCnULulhaFO1viA ++iBBKbHb+7LImb/owlNVu8iYDh/xBXmLrOHyd12K8dyN471iTBrskQwSCnYd6e/p +4i0hhNj5JidOgA6swjt9j4X7/IgsvXexLIAhqgQDSsKQpPK17E9IB+d5p3UHU71w +Ob1mGIZ2j/GJnC6YmGFPqIZZ+cy3aVOypWf8RVZYPTFCz43ZuC70cP3kl2io75Rp +rWUNKXU+yUdBphHN6KJXUmlH4T9yqXKqnxK+9CnVC/CTlucF9VpktN7wfVxVPsrY +L79iys+FLPKrDkqcjpIJ2l/n/ugcUcXvN477qFCGbRY/3tB3Dmf4AvMPpTsStkXw +Ld+xAHog8upjVGsmXODX4sKjRMIFLIHbM01Iw0ECdKoKIMwjFGenwGmpBZA/Pfxe +AXBejd9KD0stCfHuKqx7Iu5N7Fg8BCLzmcSyoOmwJEo+Z3Z42IfSjOX8rQQ= -----END CERTIFICATE----- diff --git a/internal/certs/data/testclientkey.pem b/internal/certs/data/testclientkey.pem index ce2726e..4e8b564 100644 --- a/internal/certs/data/testclientkey.pem +++ b/internal/certs/data/testclientkey.pem @@ -3,180 +3,180 @@ Public Key Info: Key Security Level: High (3072 bits) modulus: - 00:dd:2f:65:b2:d7:b5:1c:dd:eb:5a:d1:f0:7a:2c:d0 - 11:a7:a8:26:89:5c:8c:06:01:1c:31:26:88:cc:cb:10 - 12:15:3f:8a:78:2e:10:0d:19:20:46:b2:ba:39:8e:24 - fe:2e:d3:10:7b:94:1a:4f:c1:9b:be:78:fb:ee:79:e3 - 09:a6:84:3a:af:d5:0e:3e:56:18:51:d4:af:c9:03:3f - c7:55:93:53:01:1d:6b:3c:ac:06:f3:69:72:12:93:f1 - 95:13:59:83:f4:2d:17:d0:ea:41:2a:12:90:bf:df:24 - bb:3e:ae:ae:33:94:aa:33:5f:34:44:6a:e6:79:05:ef - 12:a3:57:57:45:97:cc:55:43:03:82:d9:3e:21:7f:3e - 88:27:99:8a:c8:96:7e:fe:b6:b6:d4:34:12:2b:a2:5c - b6:a0:77:aa:ee:12:ea:02:49:58:b6:b5:5b:30:b0:2f - c7:a7:c0:08:d9:1a:39:ea:69:87:89:6b:61:b9:a7:b5 - 1f:cc:83:59:41:4b:49:ef:40:41:6e:b9:a8:2f:f1:55 - 48:28:fe:73:29:2b:60:58:54:52:9b:ef:44:cc:8e:5f - dd:64:cd:10:9f:5b:5d:88:dc:aa:0a:46:c8:85:78:e4 - 95:9b:d9:62:13:be:d7:94:cf:92:00:d7:0e:ca:91:52 - 3a:53:ac:b6:57:2c:6a:c3:e9:72:31:09:12:d9:19:8e - 98:ad:47:ec:15:38:d4:0b:a2:6b:52:b0:11:f3:36:3b - 48:bb:18:37:9b:14:98:55:12:32:92:67:23:e4:ff:48 - 3e:29:ff:40:97:ac:aa:db:30:e8:d6:42:ac:fa:15:7f - d6:1c:fc:6f:b3:01:8a:1e:4e:73:1d:81:87:af:3c:7b - 38:a9:9f:55:ae:a0:cd:48:19:49:f3:87:8f:6c:4e:39 - 1d:94:9f:d0:f0:35:ab:c8:ae:64:82:59:85:ca:66:a8 - a0:e6:a0:f2:7f:30:6f:95:75:84:8a:8f:f9:1a:b0:f0 - 81: + 00:c1:37:87:c8:05:bc:2e:18:95:e3:5e:86:ba:17:b7 + b8:67:38:a7:61:df:44:07:89:ed:e5:39:0d:a0:c9:11 + 81:07:b4:24:82:7e:b7:5f:e5:4b:fc:65:23:b0:13:c4 + d8:d4:77:26:e7:63:00:ab:11:f0:ef:cb:c5:c6:c0:de + ad:7b:e5:e1:77:3e:87:28:35:8f:8a:35:be:ae:2b:50 + 5b:d9:28:bb:12:49:4a:99:da:f7:92:fc:4a:a5:38:b6 + 37:5f:c8:bf:f6:5f:d1:3f:ee:3d:d5:63:0d:7f:ff:37 + 26:84:b2:60:d4:eb:34:f8:91:cf:70:fb:a3:d6:f9:b0 + 2d:ad:6f:ed:e1:93:08:d9:6d:d0:af:03:6e:b9:61:3e + 5b:5e:24:1d:e1:5a:89:4d:22:85:43:8d:17:a6:64:59 + aa:c1:47:95:19:9f:d7:26:8b:3b:39:30:74:ee:9d:3c + c8:23:85:84:b9:7d:8d:70:a0:10:30:e4:2a:67:27:02 + 29:77:62:18:ee:76:51:06:4a:37:73:14:bb:24:54:3e + 31:ae:d4:39:1a:c6:dd:24:7c:87:85:c5:1d:82:1f:af + 
a9:7d:9b:15:29:25:c1:dc:15:8a:5d:6a:21:41:07:9f + 1a:5c:44:c6:a5:b9:94:38:bb:5b:42:80:d8:4a:35:01 + eb:27:35:50:9f:56:91:54:06:1d:5f:53:f1:7e:82:1b + e7:0b:74:fb:6e:4c:89:5c:3c:cb:c1:1c:ac:c1:fd:ed + 2b:36:e9:e3:40:b4:b7:87:64:da:d8:f5:a9:6a:c7:f0 + ac:f2:03:a2:23:cc:00:d8:d8:1a:d7:7f:2d:66:e0:89 + 45:7b:03:3a:7a:26:f5:1a:ed:74:f2:da:8e:14:9b:ee + 7d:f7:22:af:19:0e:16:15:18:42:26:da:97:39:30:9c + 5a:23:9d:d5:7b:65:a1:99:e9:85:3d:f6:30:50:0b:73 + 74:68:30:ac:1a:18:f4:c7:9c:85:db:4d:2c:66:6a:1f + 11: public exponent: 01:00:01: private exponent: - 14:ff:c0:f9:ff:bc:b4:26:e5:87:53:d3:2e:e6:3e:42 - ce:d6:0a:02:94:84:be:b5:30:46:02:50:8e:90:e0:cf - b6:b0:b7:a6:bd:48:cc:d5:8b:d8:ea:72:ff:af:dd:17 - 3c:be:d1:1b:ca:6d:cd:10:a6:86:a8:d9:d2:44:44:27 - d0:65:51:65:0c:27:34:07:dc:7b:38:64:10:03:7c:f4 - a1:cd:40:de:24:3a:e0:21:bc:ef:33:1d:9f:61:e8:57 - ac:e4:9c:c0:7b:df:7c:f8:20:83:ac:0b:8e:0b:d3:62 - eb:8a:8e:03:5b:a3:e5:08:ae:df:a7:fe:85:92:e8:a5 - ae:58:46:72:d6:fc:91:43:b1:7b:a4:c0:5f:51:c3:50 - 0d:e2:67:e8:af:51:13:41:a9:8d:ef:fb:a1:a4:e2:84 - 7c:2b:a0:50:c5:fe:ed:84:a5:25:83:86:4a:d3:0f:56 - 37:38:e6:1e:26:7d:45:22:0b:ba:22:35:be:f8:8b:1b - 72:90:13:c4:1f:c5:d1:34:b5:0e:b2:ee:f7:e1:b9:5e - a2:29:8d:f9:6e:23:4b:50:8f:35:c8:a9:f3:d2:1f:dd - ce:a0:96:50:2d:2e:af:cf:b5:e1:20:e7:e9:d2:49:ed - b5:0e:5b:3e:d1:4b:f1:fa:c2:73:3a:1b:51:34:7e:75 - 30:06:d2:47:d2:a8:2a:45:be:16:fb:8f:63:84:85:b7 - bf:f7:c4:c5:3d:95:56:8c:d1:02:7f:58:ac:4d:11:7b - c5:55:f3:c8:4e:d7:d9:aa:62:b0:e3:1e:04:5c:97:d1 - ca:e2:71:aa:8b:33:b4:34:e9:04:d4:70:7c:f4:cb:57 - 19:c1:03:23:f4:bc:4d:91:8f:b2:9a:99:1c:6c:81:2d - 4d:2d:e9:a1:e3:ce:e3:c9:62:52:89:1f:47:86:61:f1 - dd:bc:46:8d:79:0a:99:9d:aa:4b:a9:0a:72:54:db:dc - ae:48:be:60:4a:73:99:d8:3c:9e:07:78:05:df:87:39 + 70:0e:fd:af:d3:2b:ad:6c:52:d9:f8:43:99:00:12:6c + 5f:69:2b:22:87:33:54:4f:f9:69:fc:e9:db:7b:61:ac + 7c:c4:4c:7c:66:73:81:a9:61:a5:73:1e:fc:8a:aa:9a + ba:b6:94:18:94:81:99:b5:a1:0f:e2:15:c5:4c:ac:98 + df:07:96:f8:ea:89:c6:97:31:b5:8d:b0:16:21:46:cc + ce:28:62:3e:9b:c5:29:70:26:2f:d8:24:8e:a8:52:7d + d1:0e:83:ce:a7:09:9b:d3:57:87:3f:98:5f:c8:ab:ba + aa:31:2e:19:ae:84:1d:39:ab:9e:b2:42:f6:75:ff:68 + ae:73:00:fa:d7:a4:c5:3d:7c:4f:54:65:4e:1c:88:e6 + c2:b5:9d:a2:ca:38:61:45:09:17:01:68:5a:f7:4e:4d + cb:24:f1:e3:57:a1:97:58:1e:b3:ef:57:91:e0:1d:95 + 51:8c:a9:4a:4e:f7:cd:fe:f7:04:f3:ff:67:ad:e7:01 + 14:dc:7e:e4:00:c0:38:51:2f:04:db:39:6c:f1:1b:a4 + a5:f1:b4:5a:c3:17:d2:41:1a:5a:b5:f3:69:3b:b8:ba + 7b:59:96:d7:b2:c2:2c:9a:dd:e9:42:ce:fb:c8:22:fc + c5:33:97:6d:68:89:cd:e5:bc:2e:cc:9d:23:65:18:04 + 0c:83:b6:35:7e:16:09:96:d1:48:61:31:b1:ce:f8:50 + f0:14:ba:57:2f:02:1b:61:9c:bc:81:c1:ef:b3:bf:2f + fb:36:af:18:8c:90:40:55:5a:fd:a7:d4:ed:3b:94:a6 + df:ab:eb:6c:d2:bc:e3:80:7e:d5:06:21:28:9b:04:65 + b5:cc:04:b2:44:e9:2d:3b:7d:de:24:90:8d:fb:90:2d + 40:17:51:cf:a7:fa:ee:54:89:8f:c0:f4:e4:c2:bd:44 + 94:1d:8d:fc:b7:d7:05:4d:46:dc:63:1f:7f:d8:b4:8b + 11:db:37:be:4d:e9:2b:33:b9:6b:8c:a7:f0:43:56:c5 prime1: - 00:e9:63:0f:d7:49:31:27:a8:36:fe:95:bd:8d:05:c1 - 35:48:2e:03:4f:a6:57:54:3a:a4:95:3f:8e:9f:28:7c - d2:df:af:54:36:9e:7c:9f:c3:b9:64:8f:c0:b0:96:3c - aa:01:f6:9a:be:83:e2:85:20:0d:33:de:88:97:af:6f - be:3f:53:5a:a3:77:02:fd:81:17:91:3b:b2:2d:ab:78 - db:d9:43:db:04:69:82:61:30:e4:96:ac:88:8b:f6:3f - 56:c4:49:fd:d5:e5:8c:9d:30:ad:cf:d9:8d:5c:87:b5 - 27:4b:09:8e:19:ed:e2:11:3f:69:b2:47:be:70:39:11 - 41:a3:db:bb:b9:0e:e4:7b:50:d0:d2:c2:89:81:36:b9 - 6b:a6:fe:94:5b:06:66:e6:ed:86:52:42:5e:a9:0e:18 - db:18:f9:14:21:3d:e0:3c:8d:79:c3:f5:d2:cc:51:65 - 
fb:1c:49:ed:0a:d5:33:99:34:16:f9:1d:68:4a:78:da - 5f: + 00:f0:57:25:fd:aa:7e:98:13:08:28:99:16:eb:af:2e + 22:f6:e6:d7:bd:df:49:57:17:71:bf:21:ba:bf:75:54 + 5a:38:92:64:8c:4a:10:d4:4f:77:18:44:c2:79:f0:9d + 72:26:2e:9a:27:5d:e7:41:0b:c6:65:cb:fa:89:6d:9b + fb:87:78:e2:87:22:d4:92:21:f5:3a:57:fa:b0:bf:bb + 66:a2:bf:43:af:e8:58:b4:e2:a1:ed:97:62:09:0d:49 + ca:4c:99:a2:f4:f3:31:df:80:8e:56:be:64:9d:72:59 + ef:e9:db:4d:a3:e2:cf:79:1e:99:89:b2:f1:e3:2d:bc + 8f:a0:2a:2f:a6:f0:21:18:2d:f1:57:20:55:c1:c9:18 + c1:64:c6:9c:00:df:b2:54:55:8d:fe:d3:46:a0:5c:2e + f8:f7:10:b6:27:3a:4a:79:a1:14:b1:0c:c3:72:5b:2b + 66:d6:85:2c:7e:58:72:eb:33:62:73:34:e5:38:87:2e + 17: prime2: - 00:f2:9d:ae:5f:bd:b7:a3:87:a7:8d:30:46:06:8b:15 - a9:e5:a9:58:1c:2b:3a:7e:78:35:36:56:31:42:df:46 - 87:e8:57:0d:6e:99:de:cf:fb:a8:72:16:71:4b:b3:ad - ed:74:07:cb:cf:7d:2b:12:89:66:c4:0f:8a:ea:e3:37 - 17:2c:75:92:11:7a:a6:da:29:24:33:9b:69:c2:64:68 - 03:db:31:de:fe:1d:a2:4d:9d:91:9f:f0:50:b8:8f:d0 - 22:11:b9:b0:95:98:5e:65:bf:45:97:9b:35:f2:98:27 - 46:7c:b2:86:eb:7b:8b:57:f2:c3:49:47:7d:01:4a:9a - b0:e6:67:05:e5:61:7a:ab:63:c8:cb:d8:44:69:88:72 - a5:a9:60:89:60:df:e6:d9:4d:16:2b:35:7b:20:00:f3 - 3c:d1:78:f9:22:eb:48:c3:7f:78:63:e6:34:60:48:30 - 66:02:bb:38:c2:94:2e:b9:86:b2:2f:9a:4f:17:7f:e1 - 1f: + 00:cd:ce:5d:fb:04:16:34:f4:de:02:7d:00:07:3e:b0 + 94:8c:f4:3a:62:05:37:1a:4f:d8:40:2e:31:11:07:77 + 09:8b:bd:76:6e:85:b9:43:df:3f:86:cb:db:6d:fe:c6 + 4c:ca:e1:16:ce:5c:0e:e1:b1:10:0d:8d:48:99:d7:43 + 7f:6c:b6:20:b2:cd:0c:56:26:02:18:81:e1:67:e5:cd + b3:66:1e:77:dc:49:6a:5d:8c:9c:0e:24:14:3e:a1:4a + 7e:cf:72:e6:e4:03:e6:38:41:fa:2b:91:71:6c:33:b0 + ec:07:3a:be:5b:f8:74:f5:e4:1f:9c:c4:d0:d4:75:a8 + 35:09:05:0f:7f:54:4e:2a:bc:cc:92:de:1e:f4:74:8a + 56:36:e0:b1:37:cf:b3:9c:57:05:76:59:69:c3:03:de + c2:33:0c:c4:a1:4f:2a:b8:3c:20:63:c9:58:96:1a:e2 + 62:ce:bf:fb:a9:51:b0:66:99:35:d6:d2:60:59:72:bd + 17: coefficient: - 00:93:3e:7c:b9:ea:87:52:37:fa:d5:0a:36:fb:e1:d0 - fc:62:4d:00:0b:ad:a8:fb:bd:34:53:96:c2:6c:a1:6a - 49:b7:a0:24:33:16:95:79:14:ac:bb:75:8d:78:e9:10 - fa:be:44:60:58:94:4a:9c:ba:64:1d:86:27:8b:7f:51 - 4d:80:b0:ff:7a:91:c0:4d:a4:aa:d1:f1:79:7d:8f:71 - 49:12:73:d4:44:5f:0c:2e:55:a6:d9:13:b8:3b:e5:dc - e1:14:98:7e:eb:5b:60:ad:d7:4b:da:c0:d8:3f:bf:70 - 92:53:8c:31:6a:8b:61:5e:a3:7d:ff:84:2c:7d:ed:9f - 74:29:9a:e7:14:fb:c3:ab:8e:9f:60:6a:98:ab:86:0b - ea:fb:ff:20:2f:3b:a7:76:03:3a:55:bb:b2:c6:9c:b5 - 66:36:b8:1c:7f:9b:b6:62:89:ff:6a:d6:35:58:0b:f0 - 55:27:01:f0:67:8d:88:3f:74:48:3d:bf:8c:fc:05:62 - 47: + 33:6a:05:3e:1e:46:46:58:e2:61:38:6a:c2:8f:77:a2 + 27:b7:19:38:75:40:d6:8c:87:bc:65:a6:24:c3:97:e5 + ef:70:1b:2c:4e:9c:08:ca:1d:eb:97:11:74:14:bb:99 + de:22:a1:6e:bc:6c:c6:25:98:8a:8e:17:f4:f9:4d:a3 + 1d:01:5e:26:0e:b4:e8:1c:aa:06:7c:66:b1:89:5a:b4 + 82:65:d1:bf:20:cb:b2:57:a8:af:7f:00:07:00:7c:5e + d4:09:60:0c:0a:6e:a8:e1:16:1b:04:95:b1:bc:2b:35 + ad:80:78:0a:0a:1d:5f:c9:cc:24:3a:5e:20:03:50:44 + b8:b0:f3:f1:17:ff:41:b8:5d:56:9b:1c:f1:e6:2b:c6 + ba:a2:8c:18:25:8c:d5:90:f1:28:66:29:bb:40:3d:b2 + f9:65:99:2e:b7:1b:e3:d0:d2:1a:d7:96:70:cc:f6:74 + c5:2e:bf:f5:c9:60:c0:ff:38:f8:a8:db:1a:7d:6a:4e + exp1: - 00:99:16:2d:91:dd:a4:ac:8a:9e:68:27:f8:89:c4:38 - 93:a6:a0:e7:f3:1a:fd:35:76:b1:f6:64:16:3d:37:e5 - 88:bc:c8:d8:c8:6a:f4:fc:26:fa:38:88:42:b0:92:1b - 80:b8:80:f5:c7:f9:e2:5f:c8:42:60:bf:9b:81:43:c6 - 5c:58:55:68:a2:c8:b1:e1:6f:07:f2:6f:e1:d4:2b:21 - bf:b3:a7:da:c5:ee:1f:63:79:1a:b7:ea:bc:36:72:73 - e1:8a:27:ae:a4:db:49:7c:e2:2d:60:a5:27:20:86:b3 - c0:ee:6b:7a:16:6f:ff:55:a8:ee:bf:ce:67:90:5d:1e - 
80:9b:e6:ca:1f:fd:30:c9:e2:9c:d7:62:5b:a7:b2:29 - b5:ff:78:06:00:1f:16:e8:6a:ed:2c:8f:f4:5f:97:ab - 9e:2b:a7:56:18:e7:e9:6a:4e:b2:8c:63:76:be:26:b6 - 6a:1c:88:31:40:65:d0:ce:b1:68:50:47:85:dd:33:a0 - a9: + 5c:1b:49:f7:f9:0b:23:04:c8:2f:a6:db:dd:de:f8:f3 + 75:63:ea:72:5d:cc:21:90:5e:8b:3d:45:f0:71:ea:ad + d8:d8:61:a8:52:0a:39:13:6b:34:e5:c5:12:2e:60:68 + 8a:b1:79:6a:74:d6:57:5b:47:e1:63:56:d4:ac:29:07 + 30:57:e7:98:9a:84:94:ac:66:ea:c1:24:d5:ef:e4:c5 + e4:c1:20:13:9e:1b:c0:d6:c9:ef:e0:00:36:2f:dd:83 + a5:ef:8b:40:0c:a3:a4:60:04:2c:c2:32:95:14:69:db + 43:e8:43:cc:f6:f3:44:1b:b2:03:cf:8c:5b:df:ff:4f + 9b:b6:0f:25:0f:09:df:d6:5b:93:64:54:f9:3b:34:3d + 89:7d:83:f3:e1:c6:da:03:1f:b3:f5:0c:30:10:a3:ff + cd:cf:9d:bf:52:db:8f:d9:67:b0:a2:8f:94:97:d3:fe + 49:60:28:39:13:74:97:26:ce:28:10:b1:78:04:76:69 + exp2: - 00:8d:b1:5f:7c:94:ed:62:39:40:b6:a9:a1:cc:02:80 - c5:77:d6:9e:19:dd:79:4d:11:61:6a:79:8e:4d:92:de - bb:53:0b:3c:52:02:d5:69:3c:7d:95:1b:dc:51:2d:00 - 00:35:0a:b4:92:5a:74:c4:5f:b0:c0:02:9f:cc:2c:a5 - 29:08:93:25:9a:c5:ba:1a:a1:7a:7e:15:5e:ff:e3:ea - 07:8e:85:a2:c9:60:7f:40:bb:2c:a8:6f:0e:85:ab:a0 - 0f:b5:b0:70:1b:fe:1f:eb:66:78:fb:60:ef:71:de:40 - d9:de:cb:d9:16:40:52:12:2c:3a:b7:5a:63:fc:54:18 - e2:05:bd:d7:68:ae:b4:98:d2:2f:1c:36:13:46:5b:25 - 31:f1:28:eb:32:c3:b1:2b:e9:e4:6f:99:cd:6d:d4:80 - 3a:5d:d0:3c:18:93:b7:2c:4e:0e:fe:b1:1c:97:ba:b1 - 61:72:68:eb:6e:60:62:a5:81:b0:21:33:0a:cc:1b:a8 - 5b: + 6e:6d:c5:d5:b3:8a:aa:dd:9c:e6:5e:e6:0d:fd:20:48 + 85:1d:62:da:47:8c:1a:8d:2f:2e:b8:da:51:15:dd:54 + 7c:eb:ab:49:80:6d:39:32:e7:e6:4f:2a:2d:6a:20:43 + 02:35:26:c4:91:76:d6:b8:e8:31:2d:57:00:5d:15:f5 + a0:82:55:27:3b:88:dc:0c:c6:e1:19:87:b5:f5:03:9b + b8:36:ae:ff:bf:50:d8:63:63:34:df:3d:11:a1:ff:d3 + ed:41:ed:0b:f9:df:a4:de:19:fb:18:ae:70:6d:88:08 + 0d:95:02:a1:5c:be:7d:55:eb:74:75:d2:cb:bd:5a:05 + 23:12:d9:0e:ec:50:88:f4:07:1c:e3:1c:5e:f4:cd:69 + 97:46:97:30:a8:3c:ea:ad:72:db:de:fc:35:cc:b4:d1 + 25:0d:3b:d0:86:27:18:f6:02:37:28:c9:64:b9:86:31 + 98:58:41:13:c8:26:4b:d6:f7:a1:8d:fe:6e:e0:76:ff + Public Key PIN: - pin-sha256:iFdBnKP/7hZCLdj7qqTtdNPFjpZGka259fSYvv3X02U= + pin-sha256:Zv2mSFRUYM7ofg5obMJJxhZpnuvO7gkCOlqfDK1gzks= Public Key ID: - sha256:8857419ca3ffee16422dd8fbaaa4ed74d3c58e964691adb9f5f498befdd7d365 - sha1:aac584e0e34ca0a8a5bf9641adbd3ec1d1e075e0 + sha256:66fda648545460cee87e0e686cc249c616699eebceee09023a5a9f0cad60ce4b + sha1:155a6bfa2fc8fa5b93ff27966b88496076fedf31 -----BEGIN RSA PRIVATE KEY----- -MIIG5QIBAAKCAYEA3S9lste1HN3rWtHweizQEaeoJolcjAYBHDEmiMzLEBIVP4p4 -LhANGSBGsro5jiT+LtMQe5QaT8Gbvnj77nnjCaaEOq/VDj5WGFHUr8kDP8dVk1MB -HWs8rAbzaXISk/GVE1mD9C0X0OpBKhKQv98kuz6urjOUqjNfNERq5nkF7xKjV1dF -l8xVQwOC2T4hfz6IJ5mKyJZ+/ra21DQSK6JctqB3qu4S6gJJWLa1WzCwL8enwAjZ -GjnqaYeJa2G5p7UfzINZQUtJ70BBbrmoL/FVSCj+cykrYFhUUpvvRMyOX91kzRCf -W12I3KoKRsiFeOSVm9liE77XlM+SANcOypFSOlOstlcsasPpcjEJEtkZjpitR+wV -ONQLomtSsBHzNjtIuxg3mxSYVRIykmcj5P9IPin/QJesqtsw6NZCrPoVf9Yc/G+z -AYoeTnMdgYevPHs4qZ9VrqDNSBlJ84ePbE45HZSf0PA1q8iuZIJZhcpmqKDmoPJ/ -MG+VdYSKj/kasPCBAgMBAAECggGAFP/A+f+8tCblh1PTLuY+Qs7WCgKUhL61MEYC -UI6Q4M+2sLemvUjM1YvY6nL/r90XPL7RG8ptzRCmhqjZ0kREJ9BlUWUMJzQH3Hs4 -ZBADfPShzUDeJDrgIbzvMx2fYehXrOScwHvffPggg6wLjgvTYuuKjgNbo+UIrt+n -/oWS6KWuWEZy1vyRQ7F7pMBfUcNQDeJn6K9RE0Gpje/7oaTihHwroFDF/u2EpSWD -hkrTD1Y3OOYeJn1FIgu6IjW++IsbcpATxB/F0TS1DrLu9+G5XqIpjfluI0tQjzXI -qfPSH93OoJZQLS6vz7XhIOfp0knttQ5bPtFL8frCczobUTR+dTAG0kfSqCpFvhb7 -j2OEhbe/98TFPZVWjNECf1isTRF7xVXzyE7X2apisOMeBFyX0cricaqLM7Q06QTU -cHz0y1cZwQMj9LxNkY+ympkcbIEtTS3poePO48liUokfR4Zh8d28Ro15Cpmdqkup 
-CnJU29yuSL5gSnOZ2DyeB3gF34c5AoHBAOljD9dJMSeoNv6VvY0FwTVILgNPpldU -OqSVP46fKHzS369UNp58n8O5ZI/AsJY8qgH2mr6D4oUgDTPeiJevb74/U1qjdwL9 -gReRO7Itq3jb2UPbBGmCYTDklqyIi/Y/VsRJ/dXljJ0wrc/ZjVyHtSdLCY4Z7eIR -P2myR75wORFBo9u7uQ7ke1DQ0sKJgTa5a6b+lFsGZubthlJCXqkOGNsY+RQhPeA8 -jXnD9dLMUWX7HEntCtUzmTQW+R1oSnjaXwKBwQDyna5fvbejh6eNMEYGixWp5alY -HCs6fng1NlYxQt9Gh+hXDW6Z3s/7qHIWcUuzre10B8vPfSsSiWbED4rq4zcXLHWS -EXqm2ikkM5tpwmRoA9sx3v4dok2dkZ/wULiP0CIRubCVmF5lv0WXmzXymCdGfLKG -63uLV/LDSUd9AUqasOZnBeVheqtjyMvYRGmIcqWpYIlg3+bZTRYrNXsgAPM80Xj5 -IutIw394Y+Y0YEgwZgK7OMKULrmGsi+aTxd/4R8CgcEAmRYtkd2krIqeaCf4icQ4 -k6ag5/Ma/TV2sfZkFj035Yi8yNjIavT8Jvo4iEKwkhuAuID1x/niX8hCYL+bgUPG -XFhVaKLIseFvB/Jv4dQrIb+zp9rF7h9jeRq36rw2cnPhiieupNtJfOItYKUnIIaz -wO5rehZv/1Wo7r/OZ5BdHoCb5sof/TDJ4pzXYlunsim1/3gGAB8W6GrtLI/0X5er -niunVhjn6WpOsoxjdr4mtmociDFAZdDOsWhQR4XdM6CpAoHBAI2xX3yU7WI5QLap -ocwCgMV31p4Z3XlNEWFqeY5Nkt67Uws8UgLVaTx9lRvcUS0AADUKtJJadMRfsMAC -n8wspSkIkyWaxboaoXp+FV7/4+oHjoWiyWB/QLssqG8OhaugD7WwcBv+H+tmePtg -73HeQNney9kWQFISLDq3WmP8VBjiBb3XaK60mNIvHDYTRlslMfEo6zLDsSvp5G+Z -zW3UgDpd0DwYk7csTg7+sRyXurFhcmjrbmBipYGwITMKzBuoWwKBwQCTPny56odS -N/rVCjb74dD8Yk0AC62o+700U5bCbKFqSbegJDMWlXkUrLt1jXjpEPq+RGBYlEqc -umQdhieLf1FNgLD/epHATaSq0fF5fY9xSRJz1ERfDC5VptkTuDvl3OEUmH7rW2Ct -10vawNg/v3CSU4wxaothXqN9/4Qsfe2fdCma5xT7w6uOn2BqmKuGC+r7/yAvO6d2 -AzpVu7LGnLVmNrgcf5u2Yon/atY1WAvwVScB8GeNiD90SD2/jPwFYkc= +MIIG4gIBAAKCAYEAwTeHyAW8LhiV416Guhe3uGc4p2HfRAeJ7eU5DaDJEYEHtCSC +frdf5Uv8ZSOwE8TY1Hcm52MAqxHw78vFxsDerXvl4Xc+hyg1j4o1vq4rUFvZKLsS +SUqZ2veS/EqlOLY3X8i/9l/RP+491WMNf/83JoSyYNTrNPiRz3D7o9b5sC2tb+3h +kwjZbdCvA265YT5bXiQd4VqJTSKFQ40XpmRZqsFHlRmf1yaLOzkwdO6dPMgjhYS5 +fY1woBAw5CpnJwIpd2IY7nZRBko3cxS7JFQ+Ma7UORrG3SR8h4XFHYIfr6l9mxUp +JcHcFYpdaiFBB58aXETGpbmUOLtbQoDYSjUB6yc1UJ9WkVQGHV9T8X6CG+cLdPtu +TIlcPMvBHKzB/e0rNunjQLS3h2Ta2PWpasfwrPIDoiPMANjYGtd/LWbgiUV7Azp6 +JvUa7XTy2o4Um+599yKvGQ4WFRhCJtqXOTCcWiOd1XtloZnphT32MFALc3RoMKwa +GPTHnIXbTSxmah8RAgMBAAECggGAcA79r9MrrWxS2fhDmQASbF9pKyKHM1RP+Wn8 +6dt7Yax8xEx8ZnOBqWGlcx78iqqauraUGJSBmbWhD+IVxUysmN8HlvjqicaXMbWN +sBYhRszOKGI+m8UpcCYv2CSOqFJ90Q6DzqcJm9NXhz+YX8iruqoxLhmuhB05q56y +QvZ1/2iucwD616TFPXxPVGVOHIjmwrWdoso4YUUJFwFoWvdOTcsk8eNXoZdYHrPv +V5HgHZVRjKlKTvfN/vcE8/9nrecBFNx+5ADAOFEvBNs5bPEbpKXxtFrDF9JBGlq1 +82k7uLp7WZbXssIsmt3pQs77yCL8xTOXbWiJzeW8LsydI2UYBAyDtjV+FgmW0Uhh +MbHO+FDwFLpXLwIbYZy8gcHvs78v+zavGIyQQFVa/afU7TuUpt+r62zSvOOAftUG +ISibBGW1zASyROktO33eJJCN+5AtQBdRz6f67lSJj8D05MK9RJQdjfy31wVNRtxj +H3/YtIsR2ze+TekrM7lrjKfwQ1bFAoHBAPBXJf2qfpgTCCiZFuuvLiL25te930lX +F3G/Ibq/dVRaOJJkjEoQ1E93GETCefCdciYumidd50ELxmXL+oltm/uHeOKHItSS +IfU6V/qwv7tmor9Dr+hYtOKh7ZdiCQ1JykyZovTzMd+Ajla+ZJ1yWe/p202j4s95 +HpmJsvHjLbyPoCovpvAhGC3xVyBVwckYwWTGnADfslRVjf7TRqBcLvj3ELYnOkp5 +oRSxDMNyWytm1oUsflhy6zNiczTlOIcuFwKBwQDNzl37BBY09N4CfQAHPrCUjPQ6 +YgU3Gk/YQC4xEQd3CYu9dm6FuUPfP4bL223+xkzK4RbOXA7hsRANjUiZ10N/bLYg +ss0MViYCGIHhZ+XNs2Yed9xJal2MnA4kFD6hSn7PcubkA+Y4QforkXFsM7DsBzq+ +W/h09eQfnMTQ1HWoNQkFD39UTiq8zJLeHvR0ilY24LE3z7OcVwV2WWnDA97CMwzE +oU8quDwgY8lYlhriYs6/+6lRsGaZNdbSYFlyvRcCgcBcG0n3+QsjBMgvptvd3vjz +dWPqcl3MIZBeiz1F8HHqrdjYYahSCjkTazTlxRIuYGiKsXlqdNZXW0fhY1bUrCkH +MFfnmJqElKxm6sEk1e/kxeTBIBOeG8DWye/gADYv3YOl74tADKOkYAQswjKVFGnb +Q+hDzPbzRBuyA8+MW9//T5u2DyUPCd/WW5NkVPk7ND2JfYPz4cbaAx+z9QwwEKP/ +zc+dv1Lbj9lnsKKPlJfT/klgKDkTdJcmzigQsXgEdmkCgcBubcXVs4qq3ZzmXuYN +/SBIhR1i2keMGo0vLrjaURXdVHzrq0mAbTky5+ZPKi1qIEMCNSbEkXbWuOgxLVcA +XRX1oIJVJzuI3AzG4RmHtfUDm7g2rv+/UNhjYzTfPRGh/9PtQe0L+d+k3hn7GK5w +bYgIDZUCoVy+fVXrdHXSy71aBSMS2Q7sUIj0BxzjHF70zWmXRpcwqDzqrXLb3vw1 
+zLTRJQ070IYnGPYCNyjJZLmGMZhYQRPIJkvW96GN/m7gdv8CgcAzagU+HkZGWOJh +OGrCj3eiJ7cZOHVA1oyHvGWmJMOX5e9wGyxOnAjKHeuXEXQUu5neIqFuvGzGJZiK +jhf0+U2jHQFeJg606ByqBnxmsYlatIJl0b8gy7JXqK9/AAcAfF7UCWAMCm6o4RYb +BJWxvCs1rYB4CgodX8nMJDpeIANQRLiw8/EX/0G4XVabHPHmK8a6oowYJYzVkPEo +Zim7QD2y+WWZLrcb49DSGteWcMz2dMUuv/XJYMD/OPio2xp9ak4= -----END RSA PRIVATE KEY----- From f046ade489aff1e6629c33d6504243585edf0b65 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 12:09:41 +0200 Subject: [PATCH 217/235] change go.mod as first step towards go 1.25 raise minium version of go compatiblity to 1.24.9 and toolchain to be used to 1.25.3 --- go.mod | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 5a27126..1cd0acc 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,8 @@ module github.com/gocsaf/csaf/v3 -go 1.23.0 +go 1.24.9 -toolchain go1.24.4 +toolchain go1.25.3 require ( github.com/BurntSushi/toml v1.5.0 From fc012fa820d60dc7b18651dc65f1c753614a64cb Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 12:42:36 +0200 Subject: [PATCH 218/235] upgrade .github/workflows and documentation * update all .github/workflows/ to use the latest version of actions and the go versions accordingly. (Only some github actions use a floating tag for the major version.) * reduce places where the go versions are hardcoded: * refEr to docs/Development.md from README.md * use `go.mod` from itest.yml. --- .github/workflows/generate-markdown.yml | 4 ++-- .github/workflows/go-oldstable.yml | 4 ++-- .github/workflows/go.yml | 14 +++++++------- .github/workflows/itest.yml | 5 +++-- .github/workflows/release.yml | 8 +++++--- README.md | 3 ++- docs/Development.md | 2 +- 7 files changed, 22 insertions(+), 18 deletions(-) diff --git a/.github/workflows/generate-markdown.yml b/.github/workflows/generate-markdown.yml index a59c944..7d9aca0 100644 --- a/.github/workflows/generate-markdown.yml +++ b/.github/workflows/generate-markdown.yml @@ -13,8 +13,8 @@ jobs: auto-update-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 - name: Markdown autodocs - uses: dineshsonachalam/markdown-autodocs@v1.0.4 + uses: dineshsonachalam/markdown-autodocs@v1.0.7 with: output_file_paths: '[./README.md, ./docs/*.md]' diff --git a/.github/workflows/go-oldstable.yml b/.github/workflows/go-oldstable.yml index fda6413..40eb8c2 100644 --- a/.github/workflows/go-oldstable.yml +++ b/.github/workflows/go-oldstable.yml @@ -12,10 +12,10 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: 'oldstable' diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b3f5389..d3d9522 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: "stable" @@ -27,12 +27,12 @@ jobs: run: go vet ./... - name: gofmt - uses: Jerome1337/gofmt-action@v1.0.4 + uses: Jerome1337/gofmt-action@v1.0.5 with: gofmt-flags: "-l -d" - name: Revive Action - uses: morphy2k/revive-action@v2.7.4 + uses: morphy2k/revive-action@v2 - name: Tests run: go test -v ./... 
@@ -46,17 +46,17 @@ jobs: pull-requests: write # Modver needs to write comments/status on PRs steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 # Modver needs full history for comparison - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: "stable" - name: Modver - uses: bobg/modver@v2.11.0 + uses: bobg/modver@v2.12.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index a99c269..6d32009 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -6,9 +6,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "^1.23.6" + go-version-file: "go.mod" + check-latest: true - name: Set up Node.js uses: actions/setup-node@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f77c9e3..52406e8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,17 +7,19 @@ on: jobs: releases-matrix: name: Release Go binaries + # use oldest available ubuntu to be compatible with more libc.so revs. runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: '^1.23.6' + go-version: '^1.24.9' + check-latest: true - name: Build run: make dist diff --git a/README.md b/README.md index 897dfe0..54543a7 100644 --- a/README.md +++ b/README.md @@ -78,7 +78,8 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) +- Needs a [supported version](docs/Development.md) of **Go** to be installed. + [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/docs/Development.md b/docs/Development.md index bc71c2c..f05d4d0 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.23 and 1.24). +the latest version of Go (currently 1.24 and 1.25). 
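With patch 217 the go directive (1.24.9) and the toolchain directive (go1.25.3) in go.mod become the single place where the supported Go versions are recorded, and patch 218 lets CI read them via go-version-file: "go.mod". As an illustration only, assuming the golang.org/x/mod module (the same parser the go command uses), the directives can be read programmatically like this:

package main

import (
	"fmt"
	"os"

	"golang.org/x/mod/modfile"
)

func main() {
	data, err := os.ReadFile("go.mod")
	if err != nil {
		panic(err)
	}
	f, err := modfile.Parse("go.mod", data, nil)
	if err != nil {
		panic(err)
	}
	if f.Go != nil {
		fmt.Println("minimum go version:", f.Go.Version) // 1.24.9 after this patch
	}
	if f.Toolchain != nil {
		fmt.Println("toolchain:", f.Toolchain.Name) // go1.25.3 after this patch
	}
}
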
## Generated files From 223570ac9bde5648e59044a05edde0fb80981fac Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:00:33 +0200 Subject: [PATCH 219/235] fix itest.yml: checkout before refer to go.mod --- .github/workflows/itest.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index 6d32009..878d1a3 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -5,6 +5,9 @@ jobs: build: runs-on: ubuntu-latest steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Set up Go uses: actions/setup-go@v6 with: @@ -12,13 +15,10 @@ jobs: check-latest: true - name: Set up Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v6 with: node-version: 24 - - name: Checkout - uses: actions/checkout@v4 - - name: Execute the scripts run: | sudo apt update From ef44c92f8b763e467f7b4cdc834c41f79155d438 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:17:28 +0200 Subject: [PATCH 220/235] improve code cleanness: use format string w error and thus makes newer go test versions happy --- cmd/csaf_checker/processor.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e427b44..584684c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -254,14 +254,12 @@ func (p *processor) run(domains []string) (*Report, error) { if !p.checkProviderMetadata(d) { // We need to fail the domain if the PMD cannot be parsed. p.badProviderMetadata.use() - message := fmt.Sprintf("Could not parse the Provider-Metadata.json of: %s", d) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Could not parse the Provider-Metadata.json of: %s", d) } if err := p.checkDomain(d); err != nil { p.badProviderMetadata.use() - message := fmt.Sprintf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Failed to find valid provider-metadata.json for domain %s: %v. 
", d, err) } domain := &Domain{Name: d} From ffb1a3194429be20311db025956c5629962c2647 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:22:37 +0200 Subject: [PATCH 221/235] update go dependencies --- go.mod | 16 ++++++++-------- go.sum | 18 ++++++++++++++++++ 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index 1cd0acc..64f6e97 100644 --- a/go.mod +++ b/go.mod @@ -10,14 +10,14 @@ require ( github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.9.0 github.com/PuerkitoBio/goquery v1.10.3 - github.com/gofrs/flock v0.12.1 + github.com/gofrs/flock v0.13.0 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 - go.etcd.io/bbolt v1.4.1 - golang.org/x/crypto v0.39.0 - golang.org/x/term v0.32.0 - golang.org/x/time v0.12.0 + go.etcd.io/bbolt v1.4.3 + golang.org/x/crypto v0.43.0 + golang.org/x/term v0.36.0 + golang.org/x/time v0.14.0 ) require ( @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/net v0.46.0 // indirect + golang.org/x/sys v0.37.0 // indirect + golang.org/x/text v0.30.0 // indirect ) diff --git a/go.sum b/go.sum index 1f5b5b4..60931c3 100644 --- a/go.sum +++ b/go.sum @@ -22,6 +22,8 @@ github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxK github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -40,9 +42,12 @@ github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= +go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= +go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= @@ -51,6 +56,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v golang.org/x/crypto v0.31.0/go.mod 
h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -67,6 +74,8 @@ golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -76,6 +85,7 @@ golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -89,6 +99,8 @@ golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -100,6 +112,8 @@ golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -112,8 +126,12 @@ golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= From 6cc1d7a38f67ee38e982acf8ba95d1432176dc9d Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 13:55:14 +0200 Subject: [PATCH 222/235] cleanup some dependencies with go mod tidy --- go.sum | 22 ++-------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/go.sum b/go.sum index 60931c3..ecd3d7e 100644 --- a/go.sum +++ b/go.sum @@ -20,8 +20,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= -github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -40,12 +38,9 @@ github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCw github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= -go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -54,8 +49,6 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ -72,8 +65,6 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -83,9 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -97,8 +87,6 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= @@ -110,8 +98,6 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= 
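The dependency bumps in patches 221 and 222 (for example golang.org/x/crypto to v0.43.0) are recorded in every binary built from the module; `go version -m <binary>` or runtime/debug can be used to confirm which versions a build actually contains. A small sketch of the latter:

package main

import (
	"fmt"
	"runtime/debug"
)

func main() {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		fmt.Println("no build info available (binary not built from a module)")
		return
	}
	for _, dep := range info.Deps {
		// Path, version and go.sum hash of every dependency in this build.
		fmt.Printf("%s %s %s\n", dep.Path, dep.Version, dep.Sum)
	}
}
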
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -124,12 +110,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= -golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= From 8740244dd82ebe482ebb1698f4328ef41062f6ff Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 14:12:16 +0200 Subject: [PATCH 223/235] fix .github/workflows action versions --- .github/workflows/go-oldstable.yml | 4 ++-- .github/workflows/go.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/go-oldstable.yml b/.github/workflows/go-oldstable.yml index 40eb8c2..75fd280 100644 --- a/.github/workflows/go-oldstable.yml +++ b/.github/workflows/go-oldstable.yml @@ -12,10 +12,10 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: 'oldstable' diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index d3d9522..6d32f74 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: "stable" From b6281012f56003645e8ee32f7409d064fc874e11 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 14:15:32 +0200 Subject: [PATCH 224/235] fix go action versions --- .github/workflows/go.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 6d32f74..a9cdcf2 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -46,12 +46,12 @@ jobs: pull-requests: write # Modver needs to write comments/status on PRs steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@v5 with: fetch-depth: 0 # Modver needs full history for comparison - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: "stable" From 
cf9c62fcc0e089cea4c49428987c5b6dfe5996aa Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 16:09:18 +0200 Subject: [PATCH 225/235] silence revive linter warnings that we cannot or do not want to fix yet --- csaf/advisory.go | 39 ++++++++++++++++++++++----------------- internal/misc/mime.go | 2 +- util/client.go | 2 +- util/csv.go | 2 +- util/csv_test.go | 2 +- util/set.go | 2 +- util/url_test.go | 3 ++- 7 files changed, 29 insertions(+), 23 deletions(-) diff --git a/csaf/advisory.go b/csaf/advisory.go index cc2516a..61c9a65 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -444,10 +444,11 @@ var csafFlagLabelPattern = alternativesUnmarshal( // machine readable flag. For example, this could be a machine readable justification // code why a product is not affected. type Flag struct { - Date *string `json:"date,omitempty"` - GroupIDs *ProductGroups `json:"group_ids,omitempty"` - Label *FlagLabel `json:"label"` // required - ProductIds *Products `json:"product_ids,omitempty"` + Date *string `json:"date,omitempty"` + GroupIDs *ProductGroups `json:"group_ids,omitempty"` + Label *FlagLabel `json:"label"` // required + //revive:disable-next-line:var-naming until new major version w fix + ProductIds *Products `json:"product_ids,omitempty"` } // Flags is a list if Flag elements. @@ -606,14 +607,16 @@ type RestartRequired struct { // Remediation specifies details on how to handle (and presumably, fix) a vulnerability. type Remediation struct { - Category *RemediationCategory `json:"category"` // required - Date *string `json:"date,omitempty"` - Details *string `json:"details"` // required - Entitlements []*string `json:"entitlements,omitempty"` - GroupIds *ProductGroups `json:"group_ids,omitempty"` - ProductIds *Products `json:"product_ids,omitempty"` - RestartRequired *RestartRequired `json:"restart_required,omitempty"` - URL *string `json:"url,omitempty"` + Category *RemediationCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details *string `json:"details"` // required + Entitlements []*string `json:"entitlements,omitempty"` + //revive:disable:var-naming until new major version w fix + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` + //revive:enable + RestartRequired *RestartRequired `json:"restart_required,omitempty"` + URL *string `json:"url,omitempty"` } // Remediations is a list of Remediation elements. @@ -739,11 +742,13 @@ var csafThreatCategoryPattern = alternativesUnmarshal( // Threat contains information about a vulnerability that can change with time. type Threat struct { - Category *ThreatCategory `json:"category"` // required - Date *string `json:"date,omitempty"` - Details *string `json:"details"` // required - GroupIds *ProductGroups `json:"group_ids,omitempty"` - ProductIds *Products `json:"product_ids,omitempty"` + Category *ThreatCategory `json:"category"` // required + Date *string `json:"date,omitempty"` + Details *string `json:"details"` // required + //revive:disable:var-naming until new major version w fix + GroupIds *ProductGroups `json:"group_ids,omitempty"` + ProductIds *Products `json:"product_ids,omitempty"` + //revive:enable } // Threats is a list of Threat elements. 
diff --git a/internal/misc/mime.go b/internal/misc/mime.go index acc1ba3..3b3662d 100644 --- a/internal/misc/mime.go +++ b/internal/misc/mime.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package misc +package misc //revive:disable-line:var-naming import ( "fmt" diff --git a/util/client.go b/util/client.go index b4478ca..957d777 100644 --- a/util/client.go +++ b/util/client.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming import ( "context" diff --git a/util/csv.go b/util/csv.go index d84644c..6f9c0f4 100644 --- a/util/csv.go +++ b/util/csv.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming import ( "bufio" diff --git a/util/csv_test.go b/util/csv_test.go index 575d83d..0dd24c7 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming import ( "bytes" diff --git a/util/set.go b/util/set.go index 61eb14b..f3d136b 100644 --- a/util/set.go +++ b/util/set.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package util +package util //revive:disable-line:var-naming // Set is a simple set type. type Set[K comparable] map[K]struct{} diff --git a/util/url_test.go b/util/url_test.go index dec73dc..fb2804a 100644 --- a/util/url_test.go +++ b/util/url_test.go @@ -6,7 +6,8 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util +package util //revive:disable-line:var-naming + import ( "net/url" From fb59a4060983a3a5d3800cb29c3855372fba9c11 Mon Sep 17 00:00:00 2001 From: Bernhard Reiter Date: Thu, 23 Oct 2025 16:19:13 +0200 Subject: [PATCH 226/235] fix code formatting --- internal/misc/mime.go | 2 +- util/client.go | 2 +- util/csv.go | 2 +- util/csv_test.go | 2 +- util/set.go | 2 +- util/url_test.go | 3 +-- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/internal/misc/mime.go b/internal/misc/mime.go index 3b3662d..5bb36d0 100644 --- a/internal/misc/mime.go +++ b/internal/misc/mime.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package misc //revive:disable-line:var-naming +package misc //revive:disable-line:var-naming import ( "fmt" diff --git a/util/client.go b/util/client.go index 957d777..b82bc54 100644 --- a/util/client.go +++ b/util/client.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util //revive:disable-line:var-naming +package util //revive:disable-line:var-naming import ( "context" diff --git a/util/csv.go b/util/csv.go index 6f9c0f4..cffaf52 100644 --- a/util/csv.go +++ b/util/csv.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util 
//revive:disable-line:var-naming +package util //revive:disable-line:var-naming import ( "bufio" diff --git a/util/csv_test.go b/util/csv_test.go index 0dd24c7..68b5a3e 100644 --- a/util/csv_test.go +++ b/util/csv_test.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util //revive:disable-line:var-naming +package util //revive:disable-line:var-naming import ( "bytes" diff --git a/util/set.go b/util/set.go index f3d136b..1a625da 100644 --- a/util/set.go +++ b/util/set.go @@ -6,7 +6,7 @@ // SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) // Software-Engineering: 2023 Intevation GmbH -package util //revive:disable-line:var-naming +package util //revive:disable-line:var-naming // Set is a simple set type. type Set[K comparable] map[K]struct{} diff --git a/util/url_test.go b/util/url_test.go index fb2804a..bcf219e 100644 --- a/util/url_test.go +++ b/util/url_test.go @@ -6,8 +6,7 @@ // SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) // Software-Engineering: 2022 Intevation GmbH -package util //revive:disable-line:var-naming - +package util //revive:disable-line:var-naming import ( "net/url" From 46118544bed76824dc542f785d9e30c25fa24b6f Mon Sep 17 00:00:00 2001 From: "Bernhard E. Reiter" Date: Mon, 27 Oct 2025 10:35:38 +0100 Subject: [PATCH 227/235] upgrade dependencies, including go (#695) * change go.mod as a first step towards go 1.25: raise the minimum version of go compatibility to 1.24.9 and the toolchain to be used to 1.25.3 * upgrade .github/workflows and documentation * update all .github/workflows/ to use the latest version of actions and the go versions accordingly. (Only some github actions use a floating tag for the major version.) * reduce places where the go versions are hardcoded: * refer to docs/Development.md from README.md * use `go.mod` from itest.yml. 
* fix itest.yml: checkout before referring to go.mod * improve code cleanliness: use format strings with errors, which makes newer go test versions happy * update go dependencies * clean up some dependencies with go mod tidy * fix .github/workflows action versions * fix go action versions --- .github/workflows/generate-markdown.yml | 4 +-- .github/workflows/go-oldstable.yml | 4 +-- .github/workflows/go.yml | 14 ++++----- .github/workflows/itest.yml | 13 ++++---- .github/workflows/release.yml | 8 +++-- README.md | 3 +- cmd/csaf_checker/processor.go | 6 ++-- docs/Development.md | 2 +- go.mod | 20 ++++++------- go.sum | 40 ++++++++++++------------- 10 files changed, 58 insertions(+), 56 deletions(-) diff --git a/.github/workflows/generate-markdown.yml b/.github/workflows/generate-markdown.yml index a59c944..7d9aca0 100644 --- a/.github/workflows/generate-markdown.yml +++ b/.github/workflows/generate-markdown.yml @@ -13,8 +13,8 @@ jobs: auto-update-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 - name: Markdown autodocs - uses: dineshsonachalam/markdown-autodocs@v1.0.4 + uses: dineshsonachalam/markdown-autodocs@v1.0.7 with: output_file_paths: '[./README.md, ./docs/*.md]' diff --git a/.github/workflows/go-oldstable.yml b/.github/workflows/go-oldstable.yml index fda6413..75fd280 100644 --- a/.github/workflows/go-oldstable.yml +++ b/.github/workflows/go-oldstable.yml @@ -12,10 +12,10 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v6 with: go-version: 'oldstable' diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b3f5389..a9cdcf2 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v6 with: go-version: "stable" @@ -27,12 +27,12 @@ jobs: run: go vet ./... - name: gofmt - uses: Jerome1337/gofmt-action@v1.0.4 + uses: Jerome1337/gofmt-action@v1.0.5 with: gofmt-flags: "-l -d" - name: Revive Action - uses: morphy2k/revive-action@v2.7.4 + uses: morphy2k/revive-action@v2 - name: Tests run: go test -v ./... 
@@ -46,17 +46,17 @@ jobs: pull-requests: write # Modver needs to write comments/status on PRs steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 # Modver needs full history for comparison - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v6 with: go-version: "stable" - name: Modver - uses: bobg/modver@v2.11.0 + uses: bobg/modver@v2.12.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }} diff --git a/.github/workflows/itest.yml b/.github/workflows/itest.yml index a99c269..878d1a3 100644 --- a/.github/workflows/itest.yml +++ b/.github/workflows/itest.yml @@ -5,19 +5,20 @@ jobs: build: runs-on: ubuntu-latest steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: "^1.23.6" + go-version-file: "go.mod" + check-latest: true - name: Set up Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v6 with: node-version: 24 - - name: Checkout - uses: actions/checkout@v4 - - name: Execute the scripts run: | sudo apt update diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f77c9e3..52406e8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,17 +7,19 @@ on: jobs: releases-matrix: name: Release Go binaries + # use oldest available ubuntu to be compatible with more libc.so revs. runs-on: ubuntu-22.04 permissions: contents: write steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: - go-version: '^1.23.6' + go-version: '^1.24.9' + check-latest: true - name: Build run: make dist diff --git a/README.md b/README.md index 897dfe0..54543a7 100644 --- a/README.md +++ b/README.md @@ -78,7 +78,8 @@ Download the binaries from the most recent release assets on Github. ### Build from sources -- A recent version of **Go** (1.23+) should be installed. [Go installation](https://go.dev/doc/install) +- Needs a [supported version](docs/Development.md) of **Go** to be installed. + [Go installation](https://go.dev/doc/install) - Clone the repository `git clone https://github.com/gocsaf/csaf.git ` diff --git a/cmd/csaf_checker/processor.go b/cmd/csaf_checker/processor.go index e427b44..584684c 100644 --- a/cmd/csaf_checker/processor.go +++ b/cmd/csaf_checker/processor.go @@ -254,14 +254,12 @@ func (p *processor) run(domains []string) (*Report, error) { if !p.checkProviderMetadata(d) { // We need to fail the domain if the PMD cannot be parsed. p.badProviderMetadata.use() - message := fmt.Sprintf("Could not parse the Provider-Metadata.json of: %s", d) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Could not parse the Provider-Metadata.json of: %s", d) } if err := p.checkDomain(d); err != nil { p.badProviderMetadata.use() - message := fmt.Sprintf("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) - p.badProviderMetadata.error(message) + p.badProviderMetadata.error("Failed to find valid provider-metadata.json for domain %s: %v. ", d, err) } domain := &Domain{Name: d} diff --git a/docs/Development.md b/docs/Development.md index bc71c2c..f05d4d0 100644 --- a/docs/Development.md +++ b/docs/Development.md @@ -3,7 +3,7 @@ ## Supported Go versions We support the latest version and the one before -the latest version of Go (currently 1.23 and 1.24). 
+the latest version of Go (currently 1.24 and 1.25). ## Generated files diff --git a/go.mod b/go.mod index 5a27126..64f6e97 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,8 @@ module github.com/gocsaf/csaf/v3 -go 1.23.0 +go 1.24.9 -toolchain go1.24.4 +toolchain go1.25.3 require ( github.com/BurntSushi/toml v1.5.0 @@ -10,14 +10,14 @@ require ( github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.9.0 github.com/PuerkitoBio/goquery v1.10.3 - github.com/gofrs/flock v0.12.1 + github.com/gofrs/flock v0.13.0 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 - go.etcd.io/bbolt v1.4.1 - golang.org/x/crypto v0.39.0 - golang.org/x/term v0.32.0 - golang.org/x/time v0.12.0 + go.etcd.io/bbolt v1.4.3 + golang.org/x/crypto v0.43.0 + golang.org/x/term v0.36.0 + golang.org/x/time v0.14.0 ) require ( @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/net v0.46.0 // indirect + golang.org/x/sys v0.37.0 // indirect + golang.org/x/text v0.30.0 // indirect ) diff --git a/go.sum b/go.sum index 1f5b5b4..ecd3d7e 100644 --- a/go.sum +++ b/go.sum @@ -20,8 +20,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= -github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -38,19 +38,19 @@ github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCw github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI= -go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8= +go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= +go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= +golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -65,8 +65,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -74,8 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -87,8 +87,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod 
h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -98,8 +98,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= +golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -110,10 +110,10 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= -golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= From 9607f8db94c5078f29fd46575c3fc0cdfb527c40 Mon Sep 17 00:00:00 2001 From: Christoph Klassen <100708552+cintek@users.noreply.github.com> Date: Mon, 27 Oct 2025 10:38:22 +0100 Subject: [PATCH 228/235] fix: Documentation about supported options (#697) --- docs/csaf_downloader.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/csaf_downloader.md b/docs/csaf_downloader.md index e95bc62..123694e 100644 --- a/docs/csaf_downloader.md +++ b/docs/csaf_downloader.md @@ -75,7 +75,7 @@ insecure = false # client_cert # not set by default # client_key # not set by default # client_passphrase # not set by default -ignoresigcheck = false +ignore_sigcheck = false # rate # set to unlimited worker = 2 # time_range # not set by default From 5a1c2a08735444720d999dbdcde5a9f529c6c3d9 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 19 Nov 2025 12:12:43 +0100 Subject: [PATCH 229/235] Add category field to ROLIE feed model. 
--- csaf/rolie.go | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index d3a5ac7..9351386 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -9,8 +9,10 @@ package csaf import ( + "bytes" "encoding/json" "io" + "os" "sort" "time" @@ -169,14 +171,15 @@ type Format struct { // Entry for ROLIE. type Entry struct { - ID string `json:"id"` - Titel string `json:"title"` - Link []Link `json:"link"` - Published TimeStamp `json:"published"` - Updated TimeStamp `json:"updated"` - Summary *Summary `json:"summary,omitempty"` - Content Content `json:"content"` - Format Format `json:"format"` + ID string `json:"id"` + Titel string `json:"title"` + Link []Link `json:"link"` + Published TimeStamp `json:"published"` + Updated TimeStamp `json:"updated"` + Summary *Summary `json:"summary,omitempty"` + Content Content `json:"content"` + Format Format `json:"format"` + Category []ROLIECategory `json:"category,omitempty"` } // FeedData is the content of the ROLIE feed. @@ -196,6 +199,14 @@ type ROLIEFeed struct { // LoadROLIEFeed loads a ROLIE feed from a reader. func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) { + all, err := io.ReadAll(r) + if err != nil { + return nil, err + } + if err := os.WriteFile("rolie.json", all, 060); err != nil { + return nil, err + } + r = bytes.NewReader(all) var rf ROLIEFeed if err := misc.StrictJSONParse(r, &rf); err != nil { return nil, err From d6bac95e454665b8d5c040b92f72cbb0f1656a74 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Wed, 19 Nov 2025 12:56:04 +0100 Subject: [PATCH 230/235] Removed debugging code --- csaf/rolie.go | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index 9351386..d023028 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -9,10 +9,8 @@ package csaf import ( - "bytes" "encoding/json" "io" - "os" "sort" "time" @@ -199,14 +197,6 @@ type ROLIEFeed struct { // LoadROLIEFeed loads a ROLIE feed from a reader. func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) { - all, err := io.ReadAll(r) - if err != nil { - return nil, err - } - if err := os.WriteFile("rolie.json", all, 060); err != nil { - return nil, err - } - r = bytes.NewReader(all) var rf ROLIEFeed if err := misc.StrictJSONParse(r, &rf); err != nil { return nil, err From 9a37a8ecfa695dbd973cb9e3dacc2049f14c109a Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 27 Nov 2025 15:23:34 +0100 Subject: [PATCH 231/235] Add more fields to rolie entry. --- csaf/rolie.go | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index d023028..2b7d6fd 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -169,15 +169,22 @@ type Format struct { // Entry for ROLIE. 
type Entry struct { - ID string `json:"id"` - Titel string `json:"title"` - Link []Link `json:"link"` - Published TimeStamp `json:"published"` - Updated TimeStamp `json:"updated"` - Summary *Summary `json:"summary,omitempty"` - Content Content `json:"content"` - Format Format `json:"format"` - Category []ROLIECategory `json:"category,omitempty"` + Base *string `json:"base,omitempty"` + LanguageTag *string `json:"lang,omitempty"` + Author *json.RawMessage `json:"author,omitempty"` + Category []ROLIECategory `json:"category,omitempty"` + Content Content `json:"content"` + Contributor *json.RawMessage `json:"contibutor,omitempty"` + ID string `json:"id"` + Link []Link `json:"link"` + Published TimeStamp `json:"published"` + Rights *json.RawMessage `json:"rights,omitempty"` + Source *json.RawMessage `json:"source,omitempty"` + Summary *Summary `json:"summary,omitempty"` + Titel string `json:"title"` + Updated TimeStamp `json:"updated"` + Format Format `json:"format"` + Property *json.RawMessage `json:"property,omitempty"` } // FeedData is the content of the ROLIE feed. From c678a97d4307b8b1defb78f79b6115c119545cf5 Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Fri, 28 Nov 2025 11:03:29 +0100 Subject: [PATCH 232/235] Update 3rd party libraries --- go.mod | 12 ++++++------ go.sum | 28 ++++++++++++++-------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/go.mod b/go.mod index 64f6e97..c8ed550 100644 --- a/go.mod +++ b/go.mod @@ -9,14 +9,14 @@ require ( github.com/Intevation/gval v1.3.0 github.com/Intevation/jsonpath v0.2.1 github.com/ProtonMail/gopenpgp/v2 v2.9.0 - github.com/PuerkitoBio/goquery v1.10.3 + github.com/PuerkitoBio/goquery v1.11.0 github.com/gofrs/flock v0.13.0 github.com/jessevdk/go-flags v1.6.1 github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 go.etcd.io/bbolt v1.4.3 - golang.org/x/crypto v0.43.0 - golang.org/x/term v0.36.0 + golang.org/x/crypto v0.45.0 + golang.org/x/term v0.37.0 golang.org/x/time v0.14.0 ) @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.46.0 // indirect - golang.org/x/sys v0.37.0 // indirect - golang.org/x/text v0.30.0 // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/text v0.31.0 // indirect ) diff --git a/go.sum b/go.sum index ecd3d7e..5328cba 100644 --- a/go.sum +++ b/go.sum @@ -10,8 +10,8 @@ github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ek github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= github.com/ProtonMail/gopenpgp/v2 v2.9.0 h1:ruLzBmwe4dR1hdnrsEJ/S7psSBmV15gFttFUPP/+/kE= github.com/ProtonMail/gopenpgp/v2 v2.9.0/go.mod h1:IldDyh9Hv1ZCCYatTuuEt1XZJ0OPjxLpTarDfglih7s= -github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo= -github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y= +github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw= +github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ= github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= @@ -49,8 
+49,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -65,8 +65,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -74,8 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -87,8 +87,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry 
v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -98,8 +98,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -110,8 +110,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= From 502376ce3a4104d62b7614557f53a1c38ad62f3c Mon Sep 17 00:00:00 2001 From: JanHoefelmeyer Date: Fri, 28 Nov 2025 16:12:10 +0100 Subject: [PATCH 233/235] fix typo: contibutor -> contributor --- csaf/rolie.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/csaf/rolie.go b/csaf/rolie.go index 2b7d6fd..84b916c 100644 --- a/csaf/rolie.go +++ b/csaf/rolie.go @@ -174,7 +174,7 @@ type Entry struct { Author *json.RawMessage `json:"author,omitempty"` Category []ROLIECategory `json:"category,omitempty"` Content Content `json:"content"` - Contributor *json.RawMessage `json:"contibutor,omitempty"` + Contributor *json.RawMessage `json:"contributor,omitempty"` ID string `json:"id"` Link []Link `json:"link"` Published TimeStamp `json:"published"` From 52ce6bcde6f4a2c22eefc021b27f99866bff9d58 Mon Sep 17 00:00:00 2001 From: Benjamin Grandfond Date: Thu, 18 Dec 2025 12:50:37 +0100 Subject: [PATCH 234/235] fix: engine is invalid when name is missing (#710) --- csaf/advisory.go | 4 +- csaf/advisory_test.go | 11 +- ...dvisory-tracking-generator-no-version.json | 169 ++++++++++++++++++ 3 files changed, 177 insertions(+), 7 deletions(-) create mode 100644 testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json diff --git a/csaf/advisory.go b/csaf/advisory.go index 61c9a65..159b980 100644 --- a/csaf/advisory.go +++ b/csaf/advisory.go @@ -891,8 +891,8 @@ func (rs Revisions) Validate() error { // 
Validate validates an Engine. func (e *Engine) Validate() error { - if e.Version == nil { - return errors.New("'version' is missing") + if e.Name == nil { + return errors.New("'name' is missing") } return nil } diff --git a/csaf/advisory_test.go b/csaf/advisory_test.go index 9a82884..c53834b 100644 --- a/csaf/advisory_test.go +++ b/csaf/advisory_test.go @@ -14,11 +14,12 @@ func TestLoadAdvisory(t *testing.T) { name string args args wantErr bool - }{{ - name: "Valid documents", - args: args{jsonDir: "csaf-documents/valid"}, - wantErr: false, - }, + }{ + { + name: "Valid documents", + args: args{jsonDir: "csaf-documents/valid"}, + wantErr: false, + }, { name: "Garbage trailing data", args: args{jsonDir: "csaf-documents/trailing-garbage-data"}, diff --git a/testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json b/testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json new file mode 100644 index 0000000..47c9907 --- /dev/null +++ b/testdata/csaf-documents/valid/advisory-tracking-generator-no-version.json @@ -0,0 +1,169 @@ +{ + "document": { + "category": "csaf_vex", + "csaf_version": "2.0", + "distribution": { + "tlp": { + "label": "WHITE", + "url": "https://www.first.org/tlp/v1/" + } + }, + "notes": [ + { + "category": "summary", + "title": "Test document summary", + "text": "Auto generated test CSAF document" + } + ], + "publisher": { + "category": "vendor", + "name": "ACME Inc.", + "namespace": "https://www.example.com" + }, + "title": "Test CSAF document", + "tracking": { + "current_release_date": "2020-01-01T00:00:00Z", + "generator": { + "date": "2020-01-01T00:00:00Z", + "engine": { + "name": "csaf-tool" + } + }, + "id": "Avendor-advisory-0004", + "initial_release_date": "2020-01-01T00:00:00Z", + "revision_history": [ + { + "date": "2020-01-01T00:00:00Z", + "number": "1", + "summary": "Initial version" + } + ], + "status": "final", + "version": "1" + } + }, + "product_tree": { + "branches": [ + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_1", + "branches": [ + { + "category": "product_version", + "name": "1.1", + "product": { + "name": "AVendor product_1 1.1", + "product_id": "CSAFPID_0001" + } + }, + { + "category": "product_version", + "name": "1.2", + "product": { + "name": "AVendor product_1 1.2", + "product_id": "CSAFPID_0002" + } + }, + { + "category": "product_version", + "name": "2.0", + "product": { + "name": "AVendor product_1 2.0", + "product_id": "CSAFPID_0003" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor1", + "branches": [ + { + "category": "product_name", + "name": "product_2", + "branches": [ + { + "category": "product_version", + "name": "1", + "product": { + "name": "AVendor1 product_2 1", + "product_id": "CSAFPID_0004" + } + } + ] + } + ] + }, + { + "category": "vendor", + "name": "AVendor", + "branches": [ + { + "category": "product_name", + "name": "product_3", + "branches": [ + { + "category": "product_version", + "name": "2022H2", + "product": { + "name": "AVendor product_3 2022H2", + "product_id": "CSAFPID_0005" + } + } + ] + } + ] + } + ] + }, + "vulnerabilities": [ + { + "cve": "CVE-2020-1234", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-1234" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Customers should upgrade to the latest version of the product", + 
"date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + }, + { + "cve": "CVE-2020-9876", + "notes": [ + { + "category": "description", + "title": "CVE description", + "text": "https://nvd.nist.gov/vuln/detail/CVE-2020-9876" + } + ], + "product_status": { + "under_investigation": ["CSAFPID_0001"] + }, + "threats": [ + { + "category": "impact", + "details": "Still under investigation", + "date": "2020-01-01T00:00:00Z", + "product_ids": ["CSAFPID_0001"] + } + ] + } + ] +} From 586524a97e42c3fa5b97fbcb4e1169ad1df064da Mon Sep 17 00:00:00 2001 From: "Sascha L. Teichmann" Date: Thu, 18 Dec 2025 13:25:44 +0100 Subject: [PATCH 235/235] Update 3rd party libraries. (#711) --- go.mod | 10 +++++----- go.sum | 24 ++++++++++++------------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/go.mod b/go.mod index c8ed550..8f2cd62 100644 --- a/go.mod +++ b/go.mod @@ -15,8 +15,8 @@ require ( github.com/mitchellh/go-homedir v1.1.0 github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 go.etcd.io/bbolt v1.4.3 - golang.org/x/crypto v0.45.0 - golang.org/x/term v0.37.0 + golang.org/x/crypto v0.46.0 + golang.org/x/term v0.38.0 golang.org/x/time v0.14.0 ) @@ -27,7 +27,7 @@ require ( github.com/cloudflare/circl v1.6.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/shopspring/decimal v1.4.0 // indirect - golang.org/x/net v0.47.0 // indirect - golang.org/x/sys v0.38.0 // indirect - golang.org/x/text v0.31.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/text v0.32.0 // indirect ) diff --git a/go.sum b/go.sum index 5328cba..eeaa200 100644 --- a/go.sum +++ b/go.sum @@ -49,8 +49,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= -golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= +golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -65,8 +65,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= -golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ 
-74,8 +74,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= -golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -87,8 +87,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= -golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= +golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -98,8 +98,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= -golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= +golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= @@ -110,8 +110,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= -golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= +golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= golang.org/x/time v0.14.0 
h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=