Mirror of https://github.com/gocsaf/csaf.git, synced 2025-12-22 11:55:40 +01:00
Use changes.csv instead of index.txt when using the directory-based provider to make date filtering possible.
parent 0ad4ed9e36
commit 204ddb5a96
1 changed file with 45 additions and 18 deletions
@@ -9,7 +9,9 @@
 package csaf

 import (
-    "bufio"
+    "encoding/csv"
+    "fmt"
+    "io"
     "log"
     "net/http"
     "net/url"
@@ -173,7 +175,8 @@ func (afp *AdvisoryFileProcessor) Process(
                 continue
             }

-            files, err := afp.loadIndex(base, lg)
+            // Use changes.csv to be able to filter by age.
+            files, err := afp.loadChanges(base, lg)
             if err != nil {
                 return err
             }
@@ -186,9 +189,9 @@ func (afp *AdvisoryFileProcessor) Process(
     return nil
 }

-// loadIndex loads baseURL/index.txt and returns a list of files
+// loadChanges loads baseURL/changes.csv and returns a list of files
 // prefixed by baseURL/.
-func (afp *AdvisoryFileProcessor) loadIndex(
+func (afp *AdvisoryFileProcessor) loadChanges(
     baseURL string,
     lg func(string, ...any),
 ) ([]AdvisoryFile, error) {
@@ -197,29 +200,53 @@ func (afp *AdvisoryFileProcessor) loadIndex(
     if err != nil {
         return nil, err
     }
+    changesURL := base.JoinPath("changes.csv").String()

-    indexURL := base.JoinPath("index.txt").String()
-    resp, err := afp.client.Get(indexURL)
+    resp, err := afp.client.Get(changesURL)
     if err != nil {
         return nil, err
     }

+    if resp.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("fetching %s failed. Status code %d (%s)",
+            changesURL, resp.StatusCode, resp.Status)
+    }
+
     defer resp.Body.Close()
     var files []AdvisoryFile
-    scanner := bufio.NewScanner(resp.Body)
-    for line := 1; scanner.Scan(); line++ {
-        u := scanner.Text()
-        if _, err := url.Parse(u); err != nil {
-            lg("index.txt contains invalid URL %q in line %d", u, line)
+    c := csv.NewReader(resp.Body)
+    const (
+        pathColumn = 0
+        timeColumn = 1
+    )
+    for line := 1; ; line++ {
+        r, err := c.Read()
+        if err == io.EOF {
+            break
+        }
+        if err != nil {
+            return nil, err
+        }
+        if len(r) < 2 {
+            lg("%q has not enough columns in line %d", changesURL, line)
+            continue
+        }
+        t, err := time.Parse(time.RFC3339, r[timeColumn])
+        if err != nil {
+            lg("%q has an invalid time stamp in line %d: %v", changesURL, line, err)
+            continue
+        }
+        // Apply date range filtering.
+        if afp.AgeAccept != nil && !afp.AgeAccept(t) {
+            continue
+        }
+        path := r[pathColumn]
+        if _, err := url.Parse(path); err != nil {
+            lg("%q contains an invalid URL %q in line %d", changesURL, path, line)
             continue
         }
         files = append(files,
-            PlainAdvisoryFile(base.JoinPath(u).String()))
+            PlainAdvisoryFile(base.JoinPath(path).String()))
     }
-
-    if err := scanner.Err(); err != nil {
-        return nil, err
-    }
     return files, nil
 }
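For reference, the rows the new loadChanges code consumes are plain two-column CSV records: the advisory path relative to the base URL in column 0 (pathColumn) and an RFC3339 timestamp of the last change in column 1 (timeColumn). Two illustrative rows, made up for this example and not taken from a real provider, would look like this:

2023/example-vendor-2023-0042.json,2023-04-17T15:08:41Z
2022/example-vendor-2022-0193.json,2022-11-02T09:30:00Z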
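The following self-contained Go sketch mirrors the parsing strategy of the new loadChanges loop outside the repository's types: it reads such CSV data with encoding/csv, parses column 1 with time.Parse(time.RFC3339), and drops rows that fail an age predicate, which is how the AgeAccept hook is used above. The inline data, the fixed reference date, and the 90-day cut-off are invented for illustration; the real code streams the CSV from <baseURL>/changes.csv via afp.client.

package main

import (
    "encoding/csv"
    "fmt"
    "io"
    "log"
    "strings"
    "time"
)

func main() {
    // Illustrative changes.csv content: path, RFC3339 change time.
    const changes = `2023/example-vendor-2023-0042.json,2023-04-17T15:08:41Z
2022/example-vendor-2022-0193.json,2022-11-02T09:30:00Z`

    // Accept only advisories changed within 90 days of a fixed
    // reference date (fixed so the example is deterministic).
    now := time.Date(2023, time.May, 1, 0, 0, 0, 0, time.UTC)
    ageAccept := func(t time.Time) bool { return now.Sub(t) <= 90*24*time.Hour }

    c := csv.NewReader(strings.NewReader(changes))
    for line := 1; ; line++ {
        r, err := c.Read()
        if err == io.EOF {
            break
        }
        if err != nil {
            log.Fatal(err)
        }
        if len(r) < 2 {
            log.Printf("line %d has not enough columns", line)
            continue
        }
        t, err := time.Parse(time.RFC3339, r[1])
        if err != nil {
            log.Printf("line %d has an invalid time stamp: %v", line, err)
            continue
        }
        if !ageAccept(t) {
            continue // too old, skip just like loadChanges does
        }
        fmt.Println(r[0]) // prints only 2023/example-vendor-2023-0042.json
    }
}

Filtering on the change time carried by changes.csv is what makes date filtering possible for directory-based providers: index.txt only lists paths and carries no timestamps to compare against.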