mirror of https://github.com/gocsaf/csaf.git
synced 2025-12-22 18:15:42 +01:00

Merge branch 'main' into quick-ckeck

commit efa233f2ce

10 changed files with 477 additions and 85 deletions
LICENSES/LicenseRef-Go119-BSD-Patentgrant.txt (new file, 51 lines)
@@ -0,0 +1,51 @@
Copyright (c) 2009 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


Additional IP Rights Grant (Patents)

"This implementation" means the copyrightable works distributed by
Google as part of the Go project.

Google hereby grants to You a perpetual, worldwide, non-exclusive,
no-charge, royalty-free, irrevocable (except as stated in this section)
patent license to make, have made, use, offer to sell, sell, import,
transfer and otherwise run, modify and propagate the contents of this
implementation of Go, where such license applies only to those patent
claims, both currently owned or controlled by Google and acquired in
the future, licensable by Google that are necessarily infringed by this
implementation of Go. This grant does not include claims that would be
infringed only as a consequence of further modification of this
implementation. If you or your agent or exclusive licensee institute or
order or agree to the institution of patent litigation against any
entity (including a cross-claim or counterclaim in a lawsuit) alleging
that this implementation of Go or any code incorporated within this
implementation of Go constitutes direct or contributory patent
infringement, or inducement of patent infringement, then any patent
rights granted to you under this License for this implementation of Go
shall terminate as of the date such litigation is filed.
@@ -94,12 +94,17 @@ func (w *worker) writeCSV(label string, summaries []summary) error {
 
 	record := make([]string, 2)
 
+	const (
+		pathColumn = 0
+		timeColumn = 1
+	)
+
 	for i := range ss {
 		s := &ss[i]
-		record[0] =
-			s.summary.CurrentReleaseDate.Format(time.RFC3339)
-		record[1] =
+		record[pathColumn] =
 			strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + s.filename
+		record[timeColumn] =
+			s.summary.CurrentReleaseDate.Format(time.RFC3339)
 		if err := out.Write(record); err != nil {
 			f.Close()
 			return err
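Read together with the checker and provider hunks below, this change pins down the changes.csv column layout with named constants: the advisory path ("<year>/<filename>") goes into column 0 and the RFC 3339 release date into column 1. A minimal, self-contained sketch of one such record; the file name and date are invented for illustration:

package main

import (
	"encoding/csv"
	"log"
	"os"
	"time"
)

func main() {
	const (
		pathColumn = 0
		timeColumn = 1
	)
	record := make([]string, 2)
	// Shaped like the records written above; the values are made up.
	record[pathColumn] = "2022/example-advisory.json"
	record[timeColumn] = time.Date(2022, 7, 21, 12, 0, 0, 0, time.UTC).Format(time.RFC3339)

	w := csv.NewWriter(os.Stdout)
	if err := w.Write(record); err != nil {
		log.Fatal(err)
	}
	w.Flush()
	// Output: 2022/example-advisory.json,2022-07-21T12:00:00Z
}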
@@ -227,20 +227,43 @@ func (p *processor) run(reporters []reporter, domains []string) (*Report, error)
 	return &report, nil
 }
 
-func (p *processor) checkDomain(domain string) error {
+// domainChecks compiles a list of checks which should be performed
+// for a given domain.
+func (p *processor) domainChecks(domain string) []func(*processor, string) error {
 
-	// TODO: Implement me!
-	for _, check := range []func(*processor, string) error{
+	// If we have a direct domain url we dont need to
+	// perform certain checks.
+	direct := strings.HasPrefix(domain, "https://")
+
+	checks := []func(*processor, string) error{
 		(*processor).checkProviderMetadata,
 		(*processor).checkPGPKeys,
-		(*processor).checkSecurity,
+	}
+
+	if !direct {
+		checks = append(checks, (*processor).checkSecurity)
+	}
+
+	checks = append(checks,
 		(*processor).checkCSAFs,
 		(*processor).checkMissing,
 		(*processor).checkInvalid,
 		(*processor).checkListing,
-		(*processor).checkWellknownMetadataReporter,
-		(*processor).checkDNSPathReporter,
-	} {
+	)
+
+	if !direct {
+		checks = append(checks,
+			(*processor).checkWellknownMetadataReporter,
+			(*processor).checkDNSPathReporter,
+		)
+	}
+
+	return checks
+}
+
+func (p *processor) checkDomain(domain string) error {
+
+	for _, check := range p.domainChecks(domain) {
 		if err := check(p, domain); err != nil && err != errContinue {
 			if err == errStop {
 				return nil
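The new domainChecks assembles the check list from method expressions and skips checkSecurity, checkWellknownMetadataReporter and checkDNSPathReporter when the configured domain is already a direct https:// URL. Below is a stripped-down sketch of that pattern; the toy type and its check names are invented for illustration, only the shape of domainChecks mirrors the hunk above:

package main

import (
	"fmt"
	"strings"
)

// toy stands in for the checker's processor type; metadata and dnsPath
// are hypothetical checks, not the project's real method set.
type toy struct{}

func (t *toy) metadata(domain string) error { fmt.Println("metadata:", domain); return nil }
func (t *toy) dnsPath(domain string) error  { fmt.Println("dns-path:", domain); return nil }

// domainChecks builds the list conditionally, like the real one above.
func (t *toy) domainChecks(domain string) []func(*toy, string) error {
	direct := strings.HasPrefix(domain, "https://")
	checks := []func(*toy, string) error{(*toy).metadata}
	if !direct {
		// Only meaningful when probing a bare domain.
		checks = append(checks, (*toy).dnsPath)
	}
	return checks
}

func main() {
	t := &toy{}
	for _, domain := range []string{"example.com", "https://example.com/provider-metadata.json"} {
		for _, check := range t.domainChecks(domain) {
			_ = check(t, domain)
		}
	}
}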
@@ -674,7 +697,13 @@ func (p *processor) processROLIEFeed(feed string) error {
 func (p *processor) checkIndex(base string, mask whereType) error {
 	client := p.httpClient()
 
-	index := base + "/index.txt"
+	bu, err := url.Parse(base)
+	if err != nil {
+		return err
+	}
+
+	index := util.JoinURLPath(bu, "index.txt").String()
+
 	p.checkTLS(index)
 
 	p.badIndices.use()
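The checker used to glue "/index.txt" onto the base with string concatenation; it now parses the base and goes through util.JoinURLPath (added later in this commit), which cleans the joined path. A small sketch using only the standard library, since the Go 1.19 variant of util.JoinURLPath just delegates to url.URL.JoinPath; the base URL here is made up:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	base := "https://example.com/.well-known/csaf/" // note the trailing slash

	// Naive concatenation keeps the doubled slash.
	fmt.Println(base + "/index.txt") // https://example.com/.well-known/csaf//index.txt

	// url.URL.JoinPath (Go 1.19+) cleans the path the way util.JoinURLPath does.
	bu, err := url.Parse(base)
	if err != nil {
		panic(err)
	}
	fmt.Println(bu.JoinPath("index.txt")) // https://example.com/.well-known/csaf/index.txt
}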
@@ -715,9 +744,16 @@ func (p *processor) checkIndex(base string, mask whereType) error {
 // of the fields' values and if they are sorted properly. Then it passes the files to the
 // "integrity" functions. It returns error if some test fails, otherwise nil.
 func (p *processor) checkChanges(base string, mask whereType) error {
-	client := p.httpClient()
-	changes := base + "/changes.csv"
+	bu, err := url.Parse(base)
+	if err != nil {
+		return err
+	}
+
+	changes := util.JoinURLPath(bu, "changes.csv").String()
+
 	p.checkTLS(changes)
+
+	client := p.httpClient()
 	res, err := client.Get(changes)
 
 	p.badChanges.use()
@@ -740,6 +776,10 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 	var times []time.Time
 	var files []csaf.AdvisoryFile
 	c := csv.NewReader(res.Body)
+	const (
+		pathColumn = 0
+		timeColumn = 1
+	)
 	for {
 		r, err := c.Read()
 		if err == io.EOF {
@@ -751,7 +791,7 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 		if len(r) < 2 {
 			return nil, nil, errors.New("not enough columns")
 		}
-		t, err := time.Parse(time.RFC3339, r[0])
+		t, err := time.Parse(time.RFC3339, r[timeColumn])
 		if err != nil {
 			return nil, nil, err
 		}
@@ -759,7 +799,9 @@ func (p *processor) checkChanges(base string, mask whereType) error {
 		if p.ageAccept != nil && !p.ageAccept(t) {
 			continue
 		}
-		times, files = append(times, t), append(files, csaf.PlainAdvisoryFile(r[1]))
+		times, files =
+			append(times, t),
+			append(files, csaf.PlainAdvisoryFile(r[pathColumn]))
 	}
 	return times, files, nil
 }()
@@ -85,57 +85,13 @@ func (d *downloader) httpClient() util.Client {
 	return d.client
 }
 
-func (d *downloader) loadProviderMetadataDirectly(path string) *csaf.LoadedProviderMetadata {
-	client := d.httpClient()
-	resp, err := client.Get(path)
-	if err != nil {
-		log.Printf("Error fetching '%s': %v\n", path, err)
-		return nil
-	}
-	if resp.StatusCode != http.StatusOK {
-		log.Printf(
-			"Error fetching '%s': %s (%d)\n", path, resp.Status, resp.StatusCode)
-		return nil
-	}
-	defer resp.Body.Close()
-
-	var doc interface{}
-	if err := json.NewDecoder(resp.Body).Decode(&doc); err != nil {
-		log.Printf("Decoding '%s' as JSON failed: %v\n", path, err)
-		return nil
-	}
-
-	errors, err := csaf.ValidateProviderMetadata(doc)
-	if err != nil {
-		log.Printf("Schema validation of '%s' failed: %v\n", path, err)
-		return nil
-	}
-
-	if len(errors) > 0 {
-		log.Printf(
-			"Schema validation of '%s' leads to %d issues.\n", path, len(errors))
-		return nil
-	}
-
-	return &csaf.LoadedProviderMetadata{
-		Document: doc,
-		URL:      path,
-	}
-}
-
 func (d *downloader) download(domain string) error {
-
-	var lpmd *csaf.LoadedProviderMetadata
-
-	if strings.HasPrefix(domain, "https://") {
-		lpmd = d.loadProviderMetadataDirectly(domain)
-	} else {
-		lpmd = csaf.LoadProviderMetadataForDomain(
-			d.httpClient(), domain, func(format string, args ...interface{}) {
-				log.Printf(
-					"Looking for provider-metadata.json of '"+domain+"': "+format+"\n", args...)
-			})
-	}
-
+	lpmd := csaf.LoadProviderMetadataForDomain(
+		d.httpClient(), domain, func(format string, args ...interface{}) {
+			log.Printf(
+				"Looking for provider-metadata.json of '"+domain+"': "+format+"\n", args...)
+		})
+
 	if lpmd == nil {
 		return fmt.Errorf("no provider-metadata.json found for '%s'", domain)
@@ -74,6 +74,11 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
 		path string
 	}
 
+	const (
+		pathColumn = 0
+		timeColumn = 1
+	)
+
 	changes := filepath.Join(dir, "changes.csv")
 
 	chs, err := func() ([]change, error) {
@@ -99,9 +104,9 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
 				return nil, err
 			}
 			// Check if new is already in.
-			if record[1] == fname {
+			if record[pathColumn] == fname {
 				// Identical -> no change at all.
-				if record[0] == releaseDate.Format(dateFormat) {
+				if record[timeColumn] == releaseDate.Format(dateFormat) {
 					return nil, nil
 				}
 				// replace old entry
@@ -109,11 +114,11 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
 				chs = append(chs, change{releaseDate, fname})
 				continue
 			}
-			t, err := time.Parse(dateFormat, record[0])
+			t, err := time.Parse(dateFormat, record[timeColumn])
 			if err != nil {
 				return nil, err
 			}
-			chs = append(chs, change{t, record[1]})
+			chs = append(chs, change{t, record[pathColumn]})
 		}
 		if !replaced {
 			chs = append(chs, change{releaseDate, fname})
@@ -139,8 +144,8 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
 	c := csv.NewWriter(o)
 	record := make([]string, 2)
 	for _, ch := range chs {
-		record[0] = ch.time.Format(dateFormat)
-		record[1] = ch.path
+		record[timeColumn] = ch.time.Format(dateFormat)
+		record[pathColumn] = ch.path
 		if err := c.Write(record); err != nil {
 			o.Close()
 			return err
csaf/util.go (38 lines changed)
@@ -117,8 +117,10 @@ func LoadProviderMetadatasFromSecurity(client util.Client, path string) []*LoadedProviderMetadata {
 }
 
 // LoadProviderMetadataForDomain loads a provider metadata for a given domain.
-// Returns nil if no provider metadata was found.
-// The logging can be use to track the errors happening while loading.
+// Returns nil if no provider metadata (PMD) was found.
+// If the domain starts with `https://` it only attemps to load
+// the data from that URL.
+// The logging can be used to track the errors happening while loading.
 func LoadProviderMetadataForDomain(
 	client util.Client,
 	domain string,
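According to the updated comment, a domain that already starts with https:// is treated as a direct provider-metadata.json URL and nothing else is probed; otherwise the loader walks the usual candidates. A hedged sketch of just that lookup order, with the two URLs taken from the hunks around this one; the real loader additionally consults security.txt between those steps, which is omitted here:

package main

import (
	"fmt"
	"strings"
)

// candidateURLs mirrors the documented order: a direct "https://" input is
// used as-is, otherwise the well-known path comes first and the DNS-based
// host is the last resort.
func candidateURLs(domain string) []string {
	if strings.HasPrefix(domain, "https://") {
		return []string{domain}
	}
	return []string{
		"https://" + domain + "/.well-known/csaf/provider-metadata.json",
		"https://csaf.data.security." + domain,
	}
}

func main() {
	fmt.Println(candidateURLs("example.com"))
	fmt.Println(candidateURLs("https://example.com/provider-metadata.json"))
}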
@@ -131,22 +133,33 @@ func LoadProviderMetadataForDomain(
 		}
 	}
 
+	lg := func(result *LoadedProviderMetadata, url string) {
+		if result == nil {
+			logging("%s not found.", url)
+		} else {
+			for _, msg := range result.Messages {
+				logging(msg)
+			}
+		}
+	}
+
+	// check direct path
+	if strings.HasPrefix(domain, "https://") {
+		result := LoadProviderMetadataFromURL(client, domain)
+		lg(result, domain)
+		return result
+	}
+
 	// Valid provider metadata under well-known.
 	var wellknownGood *LoadedProviderMetadata
 
 	// First try well-know path
 	wellknownURL := "https://" + domain + "/.well-known/csaf/provider-metadata.json"
 	wellknownResult := LoadProviderMetadataFromURL(client, wellknownURL)
+	lg(wellknownResult, wellknownURL)
 
-	if wellknownResult == nil {
-		logging("%s not found.", wellknownURL)
-	} else if len(wellknownResult.Messages) > 0 {
-		// There are issues
-		for _, msg := range wellknownResult.Messages {
-			logging(msg)
-		}
-	} else {
-		// We have a candidate.
+	// We have a candidate.
+	if wellknownResult != nil {
 		wellknownGood = wellknownResult
 	}
 
@@ -207,8 +220,7 @@ func LoadProviderMetadataForDomain(
 		return wellknownGood
 	}
 
-	// Last resort fall back to DNS.
-
+	// Last resort: fall back to DNS.
 	dnsURL := "https://csaf.data.security." + domain
 	dnsResult := LoadProviderMetadataFromURL(client, dnsURL)
 
@@ -137,7 +137,7 @@ insecure = true
 
 [[providers]]
 name = "local-dev-provider2"
-domain = "localhost"
+domain = "https://localhost:8443/.well-known/csaf/provider-metadata.json"
 # rate = 1.2
 # insecure = true
 write_indices = true
@@ -28,7 +28,7 @@ insecure = true
 
 [[providers]]
 name = "local-dev-provider2"
-domain = "localhost"
+domain = "https://localhost:8443/.well-known/csaf/provider-metadata.json"
 # rate = 1.2
 # insecure = true
 write_indices = true
util/joinpath.go (new file, 301 lines)
@@ -0,0 +1,301 @@
// SPDX-License-Identifier: LicenseRef-Go119-BSD-Patentgrant
// SPDX-FileCopyrightText: 2009 The Go Authors, Google Inc.

// The code of this file was extracted and adjusted from
// https://cs.opensource.google/go/go/+/refs/tags/go1.19rc2:src/net/url/url.go
// by Intevation 2022

//go:build !go1.19

package util

import (
	"net/url"
	"path"
	"strings"
)

type encoding int

const (
	encodePath encoding = 1 + iota
	encodePathSegment
	encodeHost
	encodeZone
	encodeUserPassword
	encodeQueryComponent
	encodeFragment
)

const upperhex = "0123456789ABCDEF"

func ishex(c byte) bool {
	switch {
	case '0' <= c && c <= '9':
		return true
	case 'a' <= c && c <= 'f':
		return true
	case 'A' <= c && c <= 'F':
		return true
	}
	return false
}

func unhex(c byte) byte {
	switch {
	case '0' <= c && c <= '9':
		return c - '0'
	case 'a' <= c && c <= 'f':
		return c - 'a' + 10
	case 'A' <= c && c <= 'F':
		return c - 'A' + 10
	}
	return 0
}

// Return true if the specified character should be escaped when
// appearing in a URL string, according to RFC 3986.
//
// Please be informed that for now shouldEscape does not check all
// reserved characters correctly. See golang.org/issue/5684.
func shouldEscape(c byte, mode encoding) bool {
	// §2.3 Unreserved characters (alphanum)
	if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' {
		return false
	}

	if mode == encodeHost || mode == encodeZone {
		// §3.2.2 Host allows
		//	sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
		// as part of reg-name.
		// We add : because we include :port as part of host.
		// We add [ ] because we include [ipv6]:port as part of host.
		// We add < > because they're the only characters left that
		// we could possibly allow, and Parse will reject them if we
		// escape them (because hosts can't use %-encoding for
		// ASCII bytes).
		switch c {
		case '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '[', ']', '<', '>', '"':
			return false
		}
	}

	switch c {
	case '-', '_', '.', '~': // §2.3 Unreserved characters (mark)
		return false

	case '$', '&', '+', ',', '/', ':', ';', '=', '?', '@': // §2.2 Reserved characters (reserved)
		// Different sections of the URL allow a few of
		// the reserved characters to appear unescaped.
		switch mode {
		case encodePath: // §3.3
			// The RFC allows : @ & = + $ but saves / ; , for assigning
			// meaning to individual path segments. This package
			// only manipulates the path as a whole, so we allow those
			// last three as well. That leaves only ? to escape.
			return c == '?'

		case encodePathSegment: // §3.3
			// The RFC allows : @ & = + $ but saves / ; , for assigning
			// meaning to individual path segments.
			return c == '/' || c == ';' || c == ',' || c == '?'

		case encodeUserPassword: // §3.2.1
			// The RFC allows ';', ':', '&', '=', '+', '$', and ',' in
			// userinfo, so we must escape only '@', '/', and '?'.
			// The parsing of userinfo treats ':' as special so we must escape
			// that too.
			return c == '@' || c == '/' || c == '?' || c == ':'

		case encodeQueryComponent: // §3.4
			// The RFC reserves (so we must escape) everything.
			return true

		case encodeFragment: // §4.1
			// The RFC text is silent but the grammar allows
			// everything, so escape nothing.
			return false
		}
	}

	if mode == encodeFragment {
		// RFC 3986 §2.2 allows not escaping sub-delims. A subset of sub-delims are
		// included in reserved from RFC 2396 §2.2. The remaining sub-delims do not
		// need to be escaped. To minimize potential breakage, we apply two restrictions:
		// (1) we always escape sub-delims outside of the fragment, and (2) we always
		// escape single quote to avoid breaking callers that had previously assumed that
		// single quotes would be escaped. See issue #19917.
		switch c {
		case '!', '(', ')', '*':
			return false
		}
	}

	// Everything else must be escaped.
	return true
}

// unescape unescapes a string; the mode specifies
// which section of the URL string is being unescaped.
func unescape(s string, mode encoding) (string, error) {
	// Count %, check that they're well-formed.
	n := 0
	hasPlus := false
	for i := 0; i < len(s); {
		switch s[i] {
		case '%':
			n++
			if i+2 >= len(s) || !ishex(s[i+1]) || !ishex(s[i+2]) {
				s = s[i:]
				if len(s) > 3 {
					s = s[:3]
				}
				return "", url.EscapeError(s)
			}
			// Per https://tools.ietf.org/html/rfc3986#page-21
			// in the host component %-encoding can only be used
			// for non-ASCII bytes.
			// But https://tools.ietf.org/html/rfc6874#section-2
			// introduces %25 being allowed to escape a percent sign
			// in IPv6 scoped-address literals. Yay.
			if mode == encodeHost && unhex(s[i+1]) < 8 && s[i:i+3] != "%25" {
				return "", url.EscapeError(s[i : i+3])
			}
			if mode == encodeZone {
				// RFC 6874 says basically "anything goes" for zone identifiers
				// and that even non-ASCII can be redundantly escaped,
				// but it seems prudent to restrict %-escaped bytes here to those
				// that are valid host name bytes in their unescaped form.
				// That is, you can use escaping in the zone identifier but not
				// to introduce bytes you couldn't just write directly.
				// But Windows puts spaces here! Yay.
				v := unhex(s[i+1])<<4 | unhex(s[i+2])
				if s[i:i+3] != "%25" && v != ' ' && shouldEscape(v, encodeHost) {
					return "", url.EscapeError(s[i : i+3])
				}
			}
			i += 3
		case '+':
			hasPlus = mode == encodeQueryComponent
			i++
		default:
			if (mode == encodeHost || mode == encodeZone) && s[i] < 0x80 && shouldEscape(s[i], mode) {
				return "", url.InvalidHostError(s[i : i+1])
			}
			i++
		}
	}

	if n == 0 && !hasPlus {
		return s, nil
	}

	var t strings.Builder
	t.Grow(len(s) - 2*n)
	for i := 0; i < len(s); i++ {
		switch s[i] {
		case '%':
			t.WriteByte(unhex(s[i+1])<<4 | unhex(s[i+2]))
			i += 2
		case '+':
			if mode == encodeQueryComponent {
				t.WriteByte(' ')
			} else {
				t.WriteByte('+')
			}
		default:
			t.WriteByte(s[i])
		}
	}
	return t.String(), nil
}

func escape(s string, mode encoding) string {
	spaceCount, hexCount := 0, 0
	for i := 0; i < len(s); i++ {
		c := s[i]
		if shouldEscape(c, mode) {
			if c == ' ' && mode == encodeQueryComponent {
				spaceCount++
			} else {
				hexCount++
			}
		}
	}

	if spaceCount == 0 && hexCount == 0 {
		return s
	}

	var buf [64]byte
	var t []byte

	required := len(s) + 2*hexCount
	if required <= len(buf) {
		t = buf[:required]
	} else {
		t = make([]byte, required)
	}

	if hexCount == 0 {
		copy(t, s)
		for i := 0; i < len(s); i++ {
			if s[i] == ' ' {
				t[i] = '+'
			}
		}
		return string(t)
	}

	j := 0
	for i := 0; i < len(s); i++ {
		switch c := s[i]; {
		case c == ' ' && mode == encodeQueryComponent:
			t[j] = '+'
			j++
		case shouldEscape(c, mode):
			t[j] = '%'
			t[j+1] = upperhex[c>>4]
			t[j+2] = upperhex[c&15]
			j += 3
		default:
			t[j] = s[i]
			j++
		}
	}
	return string(t)
}

func setPath(u *url.URL, p string) error {
	path, err := unescape(p, encodePath)
	if err != nil {
		return err
	}
	u.Path = path
	if escp := escape(path, encodePath); p == escp {
		// Default encoding is fine.
		u.RawPath = ""
	} else {
		u.RawPath = p
	}
	return nil
}

// JoinURLPath returns a new URL with the provided path elements joined to
// any existing path and the resulting path cleaned of any ./ or ../ elements.
// Any sequences of multiple / characters will be reduced to a single /.
func JoinURLPath(u *url.URL, elem ...string) *url.URL {

	url := *u
	if len(elem) > 0 {
		elem = append([]string{u.EscapedPath()}, elem...)
		p := path.Join(elem...)
		// path.Join will remove any trailing slashes.
		// Preserve at least one.
		if strings.HasSuffix(elem[len(elem)-1], "/") && !strings.HasSuffix(p, "/") {
			p += "/"
		}
		setPath(&url, p)
	}
	return &url
}
util/joinpath_go119.go (new file, 20 lines)
@@ -0,0 +1,20 @@
// This file is Free Software under the MIT License
// without warranty, see README.md and LICENSES/MIT.txt for details.
//
// SPDX-License-Identifier: MIT
//
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>

//go:build go1.19

package util

import "net/url"

// JoinURLPath returns a new URL with the provided path elements joined to
// any existing path and the resulting path cleaned of any ./ or ../ elements.
// Any sequences of multiple / characters will be reduced to a single /.
func JoinURLPath(u *url.URL, elem ...string) *url.URL {
	return u.JoinPath(elem...)
}
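Both build variants expose the same util.JoinURLPath helper: on Go 1.19+ it is a thin wrapper around url.URL.JoinPath, and the !go1.19 file above back-ports that behaviour. A short usage sketch against the standard library; the example URL is invented:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	u, err := url.Parse("https://example.com/csaf/white/")
	if err != nil {
		panic(err)
	}
	// util.JoinURLPath(u, "changes.csv") resolves to exactly this call on Go 1.19+.
	fmt.Println(u.JoinPath("changes.csv")) // https://example.com/csaf/white/changes.csv
	// ./ and ../ elements are cleaned, as the doc comment promises.
	fmt.Println(u.JoinPath("..", "index.txt")) // https://example.com/csaf/index.txt
}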