mirror of
https://github.com/gocsaf/csaf.git
synced 2025-12-22 11:55:40 +01:00
Compare commits
832 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
586524a97e | ||
|
|
52ce6bcde6 | ||
|
|
9393271699 | ||
|
|
0630a9a64a | ||
|
|
502376ce3a | ||
|
|
c678a97d43 | ||
|
|
9a37a8ecfa | ||
|
|
d6bac95e45 | ||
|
|
5a1c2a0873 | ||
|
|
8dd4cb4fa8 | ||
|
|
9607f8db94 | ||
|
|
46118544be | ||
|
|
fb59a40609 | ||
|
|
cf9c62fcc0 | ||
|
|
b6281012f5 | ||
|
|
8740244dd8 | ||
|
|
6cc1d7a38f | ||
|
|
ffb1a31944 | ||
|
|
ef44c92f8b | ||
|
|
223570ac9b | ||
|
|
fc012fa820 | ||
|
|
f046ade489 | ||
|
|
c6bad42c24 | ||
|
|
05eae0a9ae | ||
|
|
e3d2a58528 | ||
|
|
04955d6fad | ||
|
|
0dbf822cbd | ||
|
|
bcb7c8be10 | ||
|
|
5c1b061255 | ||
|
|
d1f33ab27d | ||
|
|
187d114631 | ||
|
|
1a2a8fae9c | ||
|
|
f6927154bf | ||
|
|
1f1a2a4cbc | ||
|
|
fa8370bd60 | ||
|
|
7ab964a3e3 | ||
|
|
c7a284bf7f | ||
|
|
08ab318545 | ||
|
|
a2fab16d3b | ||
|
|
108e5f8620 | ||
|
|
100e4d395b | ||
|
|
7fc5600521 | ||
|
|
7f27a63e3c | ||
|
|
230e9f2d2b | ||
|
|
ae184eb189 | ||
|
|
4b4d6ed594 | ||
|
|
7935818600 | ||
|
|
c81f55a752 | ||
|
|
e7c08d05cd | ||
|
|
fc3837d655 | ||
|
|
dad4e54184 | ||
|
|
01c43d96ce | ||
|
|
ca54ba53be | ||
|
|
3262e2ec2a | ||
|
|
bcd34d9fba | ||
|
|
5fd5076f52 | ||
|
|
21ce19735b | ||
|
|
27e9519ed5 | ||
|
|
a7b1291be8 | ||
|
|
7b7d0c4dcb | ||
|
|
a6d0a0c790 | ||
|
|
d54e211ef3 | ||
|
|
c833c00f84 | ||
|
|
4066704c1a | ||
|
|
f154b78340 | ||
|
|
d5778f0755 | ||
|
|
5d37dd1339 | ||
|
|
d09db6635d | ||
|
|
3f4fe5cf18 | ||
|
|
02d4931152 | ||
|
|
9c62e89a23 | ||
|
|
062e145761 | ||
|
|
36aab33de4 | ||
|
|
1098c6add0 | ||
|
|
091854a248 | ||
|
|
ce886f138a | ||
|
|
6ac97810d0 | ||
|
|
cb291bb81b | ||
|
|
12cde3aa3c | ||
|
|
fa1861385a | ||
|
|
dcdbc5d49d | ||
|
|
34705f3c6e | ||
|
|
6955c4e37c | ||
|
|
fc64bf7165 | ||
|
|
161ec1f15c | ||
|
|
3ab00e8759 | ||
|
|
91b5b4543e | ||
|
|
2f599ab017 | ||
|
|
a05ba731dd | ||
|
|
2c5ef1fd5f | ||
|
|
0848143a0b | ||
|
|
5709b14650 | ||
|
|
cf4cf7c6c1 | ||
|
|
5437d8127a | ||
|
|
a7821265ca | ||
|
|
e916f19ee4 | ||
|
|
17f6a3ac7e | ||
|
|
8163f57851 | ||
|
|
527fe71992 | ||
|
|
4429dd6985 | ||
|
|
ed55b659b4 | ||
|
|
534d6f049f | ||
|
|
3cfafa8263 | ||
|
|
3e16741ed5 | ||
|
|
ec0c3f9c2c | ||
|
|
900dcede46 | ||
|
|
24f9af7f26 | ||
|
|
1d1c5698da | ||
|
|
e91bdec201 | ||
|
|
20fdffa5cc | ||
|
|
3afa8d8b2e | ||
|
|
a4a90f4f92 | ||
|
|
6e02de974e | ||
|
|
c208a8fc8c | ||
|
|
82a6929e4d | ||
|
|
02787b24b7 | ||
|
|
7d74543bbb | ||
|
|
69df4c0624 | ||
|
|
84026b682d | ||
|
|
ed22136d49 | ||
|
|
8e5236a2b6 | ||
|
|
6e8c2ecc05 | ||
|
|
93c1a0b185 | ||
|
|
59d2cef082 | ||
|
|
028f468d6f | ||
|
|
5907a391df | ||
|
|
b6721e1d5a | ||
|
|
9275a37a9f | ||
|
|
b8a5fa72d5 | ||
|
|
8fc7f5bfad | ||
|
|
d8e903587a | ||
|
|
95ff418a27 | ||
|
|
bc5d149f74 | ||
|
|
d38150c6a0 | ||
|
|
b1a7620763 | ||
|
|
9dd4b7fc8d | ||
|
|
ebd96011fc | ||
|
|
a3d6d6acfb | ||
|
|
fc404e499c | ||
|
|
df65ad13cb | ||
|
|
68bd04676c | ||
|
|
5b6af7a4ad | ||
|
|
a51964be3f | ||
|
|
16e86051c5 | ||
|
|
938ceb872a | ||
|
|
57953e495f | ||
|
|
1daaed2c51 | ||
|
|
18af28f475 | ||
|
|
b8a98033bf | ||
|
|
56509bbb4d | ||
|
|
a5f4b10c4e | ||
|
|
ffb4eff933 | ||
|
|
678f232a9a | ||
|
|
2435abe3e1 | ||
|
|
3dc84f3537 | ||
|
|
a167bf65ad | ||
|
|
b2180849e9 | ||
|
|
7824f3b48d | ||
|
|
9495d8b1c3 | ||
|
|
f6d7589fde | ||
|
|
fe4f01d062 | ||
|
|
01645f5559 | ||
|
|
de047b7682 | ||
|
|
c00dc36547 | ||
|
|
1e3504c753 | ||
|
|
ace8aeaf98 | ||
|
|
3e9b5e1ebb | ||
|
|
e8706e5eb9 | ||
|
|
ffadad38c6 | ||
|
|
91207f2b7b | ||
|
|
1c860a1ab2 | ||
|
|
1aad5331d2 | ||
|
|
7aa95c03ca | ||
|
|
6ebe7f5f5d | ||
|
|
bf057e2fa8 | ||
|
|
bdd8aa0a94 | ||
|
|
18e2e35e7c | ||
|
|
f7dc3f5ec7 | ||
|
|
c0de0c2b6d | ||
|
|
a70a04e169 | ||
|
|
f36c96e798 | ||
|
|
c148a18dba | ||
|
|
464e88b530 | ||
|
|
37c9eaf346 | ||
|
|
5231b3386b | ||
|
|
c2e24f7bbb | ||
|
|
108c2f5508 | ||
|
|
9037574d96 | ||
|
|
8feddc70e1 | ||
|
|
13a635c7e3 | ||
|
|
1a2ce684ff | ||
|
|
be2e4e7424 | ||
|
|
3a67fb5210 | ||
|
|
0ab851a874 | ||
|
|
a131b0fb4b | ||
|
|
257c316894 | ||
|
|
bcf4d2f64a | ||
|
|
1e531de82d | ||
|
|
51dc9b5bcb | ||
|
|
a46c286cf4 | ||
|
|
cb1ed601dd | ||
|
|
5c6736b178 | ||
|
|
3084cdbc37 | ||
|
|
56fadc3a80 | ||
|
|
e2ad3d3f83 | ||
|
|
33bd6bd787 | ||
|
|
7a5347803a | ||
|
|
2f9d5658eb | ||
|
|
158b322a15 | ||
|
|
617deb4c17 | ||
|
|
1ec4a5cb5b | ||
|
|
a608cb0b17 | ||
|
|
c704275a38 | ||
|
|
684770ff2e | ||
|
|
1fde81b779 | ||
|
|
b553940769 | ||
|
|
85b67f64ef | ||
|
|
005e661479 | ||
|
|
457d519990 | ||
|
|
9b1480ae3d | ||
|
|
d64aa20cee | ||
|
|
73aef07063 | ||
|
|
455a575a70 | ||
|
|
fa96e69dd1 | ||
|
|
39a29e39f1 | ||
|
|
fb1cf32e17 | ||
|
|
e658738b56 | ||
|
|
d909e9de15 | ||
|
|
51a681ef31 | ||
|
|
b858640fc1 | ||
|
|
9a1c66eb8e | ||
|
|
6c8b3757aa | ||
|
|
03e418182d | ||
|
|
9073a8a282 | ||
|
|
b457dc872f | ||
|
|
d4ef21531a | ||
|
|
91ab7f6b1c | ||
|
|
a6bf44f7cc | ||
|
|
fb7c77b419 | ||
|
|
4a9f8a6f03 | ||
|
|
318c898a83 | ||
|
|
2fe836bed7 | ||
|
|
3935d9aa7a | ||
|
|
9e4a519fff | ||
|
|
6f8870154c | ||
|
|
a413852627 | ||
|
|
e27d64e42c | ||
|
|
0a2b69bd55 | ||
|
|
e2ab1903e7 | ||
|
|
65fae93a81 | ||
|
|
466d2c6ab7 | ||
|
|
1579065453 | ||
|
|
21ec5ad8e1 | ||
|
|
aa3604ac3d | ||
|
|
086c4ab48b | ||
|
|
77cc250561 | ||
|
|
06d8e59b66 | ||
|
|
7f9449a12f | ||
|
|
0fe118f7c1 | ||
|
|
effd4a01af | ||
|
|
26c630df4a | ||
|
|
7fbc012e2c | ||
|
|
03a907b9b8 | ||
|
|
21fa98186c | ||
|
|
0905824e02 | ||
|
|
455010dc64 | ||
|
|
5215d78331 | ||
|
|
0b5c7a27c9 | ||
|
|
d9e579242b | ||
|
|
226dc961f3 | ||
|
|
81edb6ccbe | ||
|
|
abc8b10988 | ||
|
|
8f6e6ee8bb | ||
|
|
3923dc7044 | ||
|
|
1e506d46cc | ||
|
|
e354e4b201 | ||
|
|
c05a4023ff | ||
|
|
5f2596665a | ||
|
|
d69101924b | ||
|
|
666913e61e | ||
|
|
3ba37b41c7 | ||
|
|
716f128754 | ||
|
|
1cc42f0ec0 | ||
|
|
2bb2a2e018 | ||
|
|
0198cb470a | ||
|
|
7a8cdb6d19 | ||
|
|
703127a6b3 | ||
|
|
2e5038f06d | ||
|
|
37f770ed3d | ||
|
|
a569b573a4 | ||
|
|
0ec5f22a74 | ||
|
|
7cd076d4f8 | ||
|
|
e470529dfb | ||
|
|
49da14d47f | ||
|
|
0c9516ac08 | ||
|
|
fca3f5bb9b | ||
|
|
b0d7dbb387 | ||
|
|
6b07885848 | ||
|
|
52476f8560 | ||
|
|
1854cc4c58 | ||
|
|
abce4e7f78 | ||
|
|
62290215ec | ||
|
|
21fbd401b7 | ||
|
|
1f7d5ada14 | ||
|
|
3de42e746d | ||
|
|
17945d67ac | ||
|
|
22e6d49f3f | ||
|
|
b06c316ee0 | ||
|
|
b14d775422 | ||
|
|
a495416882 | ||
|
|
be50b7fc3a | ||
|
|
e4c636fe41 | ||
|
|
2a82c53585 | ||
|
|
b3332cf288 | ||
|
|
20b2bd27b3 | ||
|
|
37cdda7c42 | ||
|
|
c8f1361c52 | ||
|
|
b5db976f05 | ||
|
|
f145a633c1 | ||
|
|
094fe37026 | ||
|
|
bdd7f24b31 | ||
|
|
4da9f67e2e | ||
|
|
ed42f193d1 | ||
|
|
f868b13c24 | ||
|
|
5a3661e81b | ||
|
|
3a7b411789 | ||
|
|
daab24eb2f | ||
|
|
2e129b9dc8 | ||
|
|
a217f88ea1 | ||
|
|
dc41aae07f | ||
|
|
4206c2e4b3 | ||
|
|
b03df5508a | ||
|
|
f45d273af9 | ||
|
|
58bad8a6cf | ||
|
|
22ef2a925e | ||
|
|
7f36ecb48c | ||
|
|
3acabdf73b | ||
|
|
4fc2fd9bf2 | ||
|
|
f59a8cc7a9 | ||
|
|
12d24647c6 | ||
|
|
11c1a2cfbb | ||
|
|
e821683423 | ||
|
|
96608a07fe | ||
|
|
a1ea10baf9 | ||
|
|
24151345f5 | ||
|
|
018e0e55f7 | ||
|
|
5459f10d39 | ||
|
|
e0475791ff | ||
|
|
4dfa2dd552 | ||
|
|
12815430ec | ||
|
|
8d51577e49 | ||
|
|
4b56f3e837 | ||
|
|
7651dc2a05 | ||
|
|
7d3c3a68df | ||
|
|
f2657bb51a | ||
|
|
5c935901ab | ||
|
|
824079899e | ||
|
|
a153906d03 | ||
|
|
79fbc2bcd8 | ||
|
|
bda7ade837 | ||
|
|
0b914f7e7a | ||
|
|
d49049c3af | ||
|
|
42709a8554 | ||
|
|
4cd376a9ca | ||
|
|
468e91cb8b | ||
|
|
f4d00cd9d8 | ||
|
|
7a202ddfdc | ||
|
|
bb09567771 | ||
|
|
7a6cbd182c | ||
|
|
8d381385b0 | ||
|
|
7464ade6ae | ||
|
|
4c6fd5457f | ||
|
|
1e4c5d863d | ||
|
|
9b684adae8 | ||
|
|
4cd0fc3bb4 | ||
|
|
e2ad76fa69 | ||
|
|
3bfff999e1 | ||
|
|
dd1e38fc0c | ||
|
|
7bab18fc41 | ||
|
|
7459f0606a | ||
|
|
a7be72b740 | ||
|
|
fa0a66570f | ||
|
|
2d1dc180c8 | ||
|
|
a4d7bea89f | ||
|
|
8315ad9918 | ||
|
|
083ccc10f1 | ||
|
|
6276866cad | ||
|
|
bb625790b2 | ||
|
|
8c95795b96 | ||
|
|
411d6ffd16 | ||
|
|
65f635d42d | ||
|
|
16603eacd3 | ||
|
|
f9fc7e773d | ||
|
|
d42db07bec | ||
|
|
38dfabde1a | ||
|
|
b0b3852e99 | ||
|
|
47e55a33bc | ||
|
|
f31ee53c27 | ||
|
|
017a6b0a10 | ||
|
|
873eb4879b | ||
|
|
b6e5af9b49 | ||
|
|
607bd0ebe1 | ||
|
|
85f9d02ac0 | ||
|
|
98bf2990ae | ||
|
|
5a4e5607cb | ||
|
|
97304ab38e | ||
|
|
8aa31984df | ||
|
|
2864176111 | ||
|
|
383b0ca77b | ||
|
|
1d9969162f | ||
|
|
8aed2c034e | ||
|
|
505693b3f0 | ||
|
|
bb0df4cd55 | ||
|
|
8a9dd6e842 | ||
|
|
2271c50ee7 | ||
|
|
690efbe075 | ||
|
|
ac8d8a9196 | ||
|
|
5e5074fbf1 | ||
|
|
125028773f | ||
|
|
1d892ff681 | ||
|
|
eade9f7ae4 | ||
|
|
5e6fb8241c | ||
|
|
975e350510 | ||
|
|
1f301b6301 | ||
|
|
9e665a2fa1 | ||
|
|
204ddb5a96 | ||
|
|
0ad4ed9e36 | ||
|
|
f8c3741d12 | ||
|
|
de0599ebe3 | ||
|
|
0e297fc616 | ||
|
|
31a37a4daf | ||
|
|
655b8f4db1 | ||
|
|
1bdaf5854a | ||
|
|
9697e99d86 | ||
|
|
19433856ca | ||
|
|
95bd705036 | ||
|
|
27ec66353c | ||
|
|
81ead2776b | ||
|
|
bfcf98464f | ||
|
|
12ad7706e3 | ||
|
|
0d17db0c59 | ||
|
|
8630e8bac2 | ||
|
|
de27a668d1 | ||
|
|
f05bcd3642 | ||
|
|
aeeb169111 | ||
|
|
b423eed4e9 | ||
|
|
4d68662913 | ||
|
|
fddc363344 | ||
|
|
a95ff9faf0 | ||
|
|
d8ad56956d | ||
|
|
8032d47b50 | ||
|
|
be3dfcd542 | ||
|
|
f97891c283 | ||
|
|
b61912410a | ||
|
|
540d02d367 | ||
|
|
04c11d7922 | ||
|
|
c1765e6967 | ||
|
|
569822486b | ||
|
|
65536f51a4 | ||
|
|
a02d9c36a7 | ||
|
|
55f6a48db1 | ||
|
|
daa4a6bf7a | ||
|
|
594e6b4b0d | ||
|
|
9967bfffe6 | ||
|
|
18732f26ba | ||
|
|
7dc1a6530e | ||
|
|
60760ee868 | ||
|
|
d393a42d61 | ||
|
|
248e0a52a4 | ||
|
|
ed26e8e41d | ||
|
|
813c083198 | ||
|
|
20bf16bd4f | ||
|
|
421a05d421 | ||
|
|
d5589a018d | ||
|
|
2ec8be4e8c | ||
|
|
8d269ce106 | ||
|
|
5614939562 | ||
|
|
a9dcfc26f3 | ||
|
|
c6d0e9a9e2 | ||
|
|
71a3c3a13b | ||
|
|
b5d1924d3f | ||
|
|
3e5137dd2f | ||
|
|
172c1cd85c | ||
|
|
f74c5123c2 | ||
|
|
d91af558ce | ||
|
|
051de5194d | ||
|
|
380ccfdf5a | ||
|
|
d7fb52b735 | ||
|
|
fd374b30b6 | ||
|
|
719ecaea76 | ||
|
|
87dbb5674b | ||
|
|
f4f3efb197 | ||
|
|
7139f4dfa9 | ||
|
|
8d45525e7f | ||
|
|
51035c0dc9 | ||
|
|
7501c60bf4 | ||
|
|
c7453a6448 | ||
|
|
7eae607810 | ||
|
|
55540a32e0 | ||
|
|
c3ef8e604c | ||
|
|
3ff7e16569 | ||
|
|
99cf30f660 | ||
|
|
cf49c7e414 | ||
|
|
9d1000d773 | ||
|
|
3eced62af6 | ||
|
|
02d476360b | ||
|
|
4461bd6892 | ||
|
|
1dab0cc9ff | ||
|
|
a0b272a60d | ||
|
|
150db4d31b | ||
|
|
068a94235c | ||
|
|
9ac902347c | ||
|
|
aeff511895 | ||
|
|
bd7831d7c3 | ||
|
|
018a1814f0 | ||
|
|
2e968b197d | ||
|
|
e0928f58ad | ||
|
|
dd15eea48e | ||
|
|
c4e9637f2b | ||
|
|
d9fe7488d3 | ||
|
|
821f018a98 | ||
|
|
5b4c621616 | ||
|
|
6a91c29baf | ||
|
|
c263391821 | ||
|
|
f32fba683d | ||
|
|
91479c9912 | ||
|
|
8ad805e1e5 | ||
|
|
c37b127d82 | ||
|
|
900da91687 | ||
|
|
a92c033a5e | ||
|
|
04d2c96be0 | ||
|
|
21477e8004 | ||
|
|
3590cf1ef2 | ||
|
|
36fa8eeeca | ||
|
|
1529821c2c | ||
|
|
e39fc34599 | ||
|
|
84b60261bf | ||
|
|
6bceb7cc1b | ||
|
|
c59a8f07a3 | ||
|
|
b0d7df69b8 | ||
|
|
1854678409 | ||
|
|
3445e58e45 | ||
|
|
0c2768b711 | ||
|
|
c3a80b9f52 | ||
|
|
39787503cc | ||
|
|
f638ae9a23 | ||
|
|
4800f4ec12 | ||
|
|
0c4ae88ee0 | ||
|
|
dd0be44e81 | ||
|
|
a28ebe39cb | ||
|
|
bb053bd427 | ||
|
|
8f87273837 | ||
|
|
92433c1272 | ||
|
|
aa574406cf | ||
|
|
47d9eccc37 | ||
|
|
4b76afd390 | ||
|
|
444ee20c46 | ||
|
|
304b2d023b | ||
|
|
fabeda2797 | ||
|
|
0708f92636 | ||
|
|
9c9fc6b8fc | ||
|
|
b5e0718fa1 | ||
|
|
dcc3507aab | ||
|
|
83a80a970c | ||
|
|
c479d94605 | ||
|
|
987da1910b | ||
|
|
706fba8e3c | ||
|
|
2f661ede5c | ||
|
|
e59640726c | ||
|
|
dbad3e3c7f | ||
|
|
1bdf207e4b | ||
|
|
704a98ab38 | ||
|
|
a3ee00568f | ||
|
|
bebeec30e0 | ||
|
|
f38cff13d6 | ||
|
|
39b48e083c | ||
|
|
05ad714619 | ||
|
|
4b4a6a4dbd | ||
|
|
38c6da8212 | ||
|
|
35dc25a5e7 | ||
|
|
a3897ae21f | ||
|
|
3bf1af0e9f | ||
|
|
80195a24c3 | ||
|
|
ffb29f5ba4 | ||
|
|
b80163c35f | ||
|
|
a90830b07c | ||
|
|
69908fc2c8 | ||
|
|
9990a8ac9b | ||
|
|
4453484811 | ||
|
|
e6c9f4d4da | ||
|
|
970a891422 | ||
|
|
361656faf0 | ||
|
|
7cc37bd9fc | ||
|
|
e998133429 | ||
|
|
7a5f8701bd | ||
|
|
8425644886 | ||
|
|
cbd9dead37 | ||
|
|
6430712dad | ||
|
|
052dbbe1d0 | ||
|
|
f60ec5fea4 | ||
|
|
15e615e653 | ||
|
|
1004fe7c65 | ||
|
|
2f280c69ac | ||
|
|
a8493c0dd2 | ||
|
|
6a60e8d8ce | ||
|
|
0c39819930 | ||
|
|
adf98736cc | ||
|
|
2b55bbb341 | ||
|
|
acb3d96c70 | ||
|
|
c8e0804eba | ||
|
|
0383e951a8 | ||
|
|
9abc83edc1 | ||
|
|
2a40ab6393 | ||
|
|
ec2881a7e1 | ||
|
|
8e5dd88a62 | ||
|
|
ddf0747327 | ||
|
|
18e6fee2e7 | ||
|
|
51fba46893 | ||
|
|
0745a0943d | ||
|
|
ff31ebfa0f | ||
|
|
ad5c678abf | ||
|
|
c143a4620b | ||
|
|
de64b88491 | ||
|
|
732383561b | ||
|
|
7e9ea2c9ac | ||
|
|
69dda45bac | ||
|
|
6dedeff7fc | ||
|
|
e004939abf | ||
|
|
bcc31c0cd6 | ||
|
|
bf6dfafffd | ||
|
|
6bf8c530c6 | ||
|
|
8c8ccf6a2e | ||
|
|
1d0499ddea | ||
|
|
c15125a393 | ||
|
|
5b60e7d728 | ||
|
|
c4b70d20cd | ||
|
|
1189d538b3 | ||
|
|
3e09094abd | ||
|
|
5c334ea508 | ||
|
|
6b9ecead89 | ||
|
|
e4128b89e4 | ||
|
|
12025e721e | ||
|
|
d28869f083 | ||
|
|
87b7844b8b | ||
|
|
4b13e77f6c | ||
|
|
70b4e18b58 | ||
|
|
3bb8ea0019 | ||
|
|
a62484c31c | ||
|
|
124794c4aa | ||
|
|
65c7925ed2 | ||
|
|
0e1b908695 | ||
|
|
2575302fbd | ||
|
|
26a8fb0b55 | ||
|
|
2656312ab1 | ||
|
|
7dbc918721 | ||
|
|
a98aa794a8 | ||
|
|
f0567ffa15 | ||
|
|
06bd16db47 | ||
|
|
a1f446f443 | ||
|
|
4f3f7efd5a | ||
|
|
37d8a8d6df | ||
|
|
b608746fac | ||
|
|
8e0812c82f | ||
|
|
696fb74b36 | ||
|
|
fd0ae57443 | ||
|
|
28616755dd | ||
|
|
f469df7cec | ||
|
|
b6f4172ff9 | ||
|
|
bdb24e72ab | ||
|
|
00a0fb68d2 | ||
|
|
9a7fbea7b6 | ||
|
|
bf95140dbe | ||
|
|
c27fa41c2f | ||
|
|
2f65019e45 | ||
|
|
ef829131e1 | ||
|
|
6547ed0a4b | ||
|
|
892a0b941b | ||
|
|
29d7dd1223 | ||
|
|
fad70b4dd5 | ||
|
|
9890a417b4 | ||
|
|
141fbe21ca | ||
|
|
050e225d07 | ||
|
|
ada8070c63 | ||
|
|
8e13d37756 | ||
|
|
dce3d1f4a7 | ||
|
|
8af0aeea46 | ||
|
|
4c0785c060 | ||
|
|
f325723e74 | ||
|
|
8478322111 | ||
|
|
fcafcbf13f | ||
|
|
e5f584092c | ||
|
|
1241429d19 | ||
|
|
be15d43dd3 | ||
|
|
86fb441446 | ||
|
|
2614c1a4ba | ||
|
|
bed44e5e87 | ||
|
|
490fe4a589 | ||
|
|
dce0a9b51f | ||
|
|
4abddee3bb | ||
|
|
8df67a7b1d | ||
|
|
101dd6880c | ||
|
|
677b0fdd4f | ||
|
|
c511a971d6 | ||
|
|
016188060f | ||
|
|
52724d0a4a | ||
|
|
92f6bc5a31 | ||
|
|
7cb376dd0e | ||
|
|
772e6351b8 | ||
|
|
58ec57e1cb | ||
|
|
1b0e5b4238 | ||
|
|
82a1a1997a | ||
|
|
60b1db2da1 | ||
|
|
ca69997e2b | ||
|
|
3769f1d338 | ||
|
|
d1855a9c30 | ||
|
|
6a605fdbcc | ||
|
|
94fe738b8c | ||
|
|
35c31e9a76 | ||
|
|
88a2d585be | ||
|
|
82feb18eef | ||
|
|
d5b9aa9ce0 | ||
|
|
5716374eac | ||
|
|
a84afa35cd | ||
|
|
7965917168 | ||
|
|
16ff06180c | ||
|
|
a1d609c7d8 | ||
|
|
0375e22747 | ||
|
|
9bb65a8cf8 | ||
|
|
cca460c77b | ||
|
|
3a43ca5630 | ||
|
|
6bf7b52890 | ||
|
|
4c1fdd2289 | ||
|
|
6a87157184 | ||
|
|
798e11ad18 | ||
|
|
649b5c904b | ||
|
|
5caed04dc8 | ||
|
|
e424cd3d6c | ||
|
|
efa233f2ce | ||
|
|
86a015d6bf | ||
|
|
8e18b6f36f | ||
|
|
8b57851486 | ||
|
|
9cba4eec30 | ||
|
|
332f0b2711 | ||
|
|
7f113f1f30 | ||
|
|
7e850f7a2f | ||
|
|
8c53b4068b | ||
|
|
b39553fc9a | ||
|
|
ec0548bef6 | ||
|
|
324de3abca | ||
|
|
0171715390 | ||
|
|
c00b8b37bb | ||
|
|
56a047cdde | ||
|
|
2ac7940206 | ||
|
|
50024915f4 | ||
|
|
46f79a9e24 | ||
|
|
cbb9c7a7a1 | ||
|
|
3ed1d3ab21 | ||
|
|
15f05fc122 | ||
|
|
73e216eccc | ||
|
|
ff2eee39e5 | ||
|
|
12693409a8 | ||
|
|
f06aae9261 | ||
|
|
dc8c89c886 | ||
|
|
69c3030eef | ||
|
|
20f5937240 | ||
|
|
acf5df9d56 | ||
|
|
141acc802e | ||
|
|
e37fe68b34 | ||
|
|
7f62caeedc | ||
|
|
7bafb210cf | ||
|
|
198e5b8897 | ||
|
|
72a7240fd0 | ||
|
|
df21b2575d | ||
|
|
03ae9892bb | ||
|
|
d2d3611278 | ||
|
|
4b7b93bd1d | ||
|
|
14fc9cdc61 | ||
|
|
da4dda9042 | ||
|
|
ed2df66ce6 | ||
|
|
3a3ef7a961 | ||
|
|
7a023dc22c | ||
|
|
b95cd0db74 | ||
|
|
bab5ebdc78 | ||
|
|
e25fe66ee8 | ||
|
|
bace61e0b3 | ||
|
|
38d3679704 | ||
|
|
f8ce08a26e | ||
|
|
0b19782374 | ||
|
|
b359fd0a62 | ||
|
|
640ef64df9 | ||
|
|
9a390e2bcc | ||
|
|
e1966a5716 | ||
|
|
07ea594e22 | ||
|
|
36384ca942 | ||
|
|
847ca3d6af | ||
|
|
a899376b8c | ||
|
|
1a6829c9ea | ||
|
|
78d8b89aca | ||
|
|
7cbbb4bf81 | ||
|
|
05915526c2 | ||
|
|
3f712584d0 | ||
|
|
004519c9fc | ||
|
|
aa523e5659 | ||
|
|
aa7aadf3f1 | ||
|
|
3465938508 | ||
|
|
22a6543932 | ||
|
|
a680d909e1 | ||
|
|
21960febe1 | ||
|
|
80eca02ca0 | ||
|
|
bb0a30aba3 | ||
|
|
0268d989b5 | ||
|
|
8fdda14aee | ||
|
|
caea539b45 | ||
|
|
041621cda7 | ||
|
|
fa434fa039 | ||
|
|
86a6f9abde | ||
|
|
1e9d31277d | ||
|
|
2961a70bf2 | ||
|
|
7e7947a601 | ||
|
|
d4f68a9b08 | ||
|
|
589547fa94 | ||
|
|
22b945a3c3 | ||
|
|
006f088082 | ||
|
|
1344885092 | ||
|
|
714814a44b | ||
|
|
5ca768b5e2 | ||
|
|
8f396bbe31 | ||
|
|
6ec2131f5d | ||
|
|
57fc012ec2 | ||
|
|
922e468d99 | ||
|
|
1e3f5ee1c4 | ||
|
|
8c7e03c882 |
197 changed files with 19861 additions and 2584 deletions
4
.github/workflows/generate-markdown.yml
vendored
4
.github/workflows/generate-markdown.yml
vendored
|
|
@ -13,8 +13,8 @@ jobs:
|
|||
auto-update-readme:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v5
|
||||
- name: Markdown autodocs
|
||||
uses: dineshsonachalam/markdown-autodocs@v1.0.4
|
||||
uses: dineshsonachalam/markdown-autodocs@v1.0.7
|
||||
with:
|
||||
output_file_paths: '[./README.md, ./docs/*.md]'
|
||||
|
|
|
|||
26
.github/workflows/go-oldstable.yml
vendored
Normal file
26
.github/workflows/go-oldstable.yml
vendored
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
name: Go Test (oldstable)
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "**.go"
|
||||
pull_request:
|
||||
paths:
|
||||
- "**.go"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 'oldstable'
|
||||
|
||||
- name: Build
|
||||
run: go build -v ./cmd/...
|
||||
|
||||
- name: Tests
|
||||
run: go test -v ./...
|
||||
40
.github/workflows/go.yml
vendored
40
.github/workflows/go.yml
vendored
|
|
@ -4,17 +4,21 @@ on:
|
|||
push:
|
||||
paths:
|
||||
- "**.go"
|
||||
pull_request:
|
||||
paths:
|
||||
- "**.go"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v2
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 1.17
|
||||
go-version: "stable"
|
||||
|
||||
- name: Build
|
||||
run: go build -v ./cmd/...
|
||||
|
|
@ -23,12 +27,36 @@ jobs:
|
|||
run: go vet ./...
|
||||
|
||||
- name: gofmt
|
||||
uses: Jerome1337/gofmt-action@v1.0.4
|
||||
uses: Jerome1337/gofmt-action@v1.0.5
|
||||
with:
|
||||
gofmt-flags: "-l -d"
|
||||
|
||||
- name: golint
|
||||
uses: Jerome1337/golint-action@v1.0.2
|
||||
- name: Revive Action
|
||||
uses: morphy2k/revive-action@v2
|
||||
|
||||
- name: Tests
|
||||
run: go test -v ./...
|
||||
|
||||
run_modver:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build # Only run when build job was successful
|
||||
if: ${{ github.event_name == 'pull_request' && success() }}
|
||||
permissions:
|
||||
contents: read # Modver needs to read the repo content
|
||||
pull-requests: write # Modver needs to write comments/status on PRs
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0 # Modver needs full history for comparison
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: "stable"
|
||||
|
||||
- name: Modver
|
||||
uses: bobg/modver@v2.12.0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
pull_request_url: https://github.com/${{ github.repository }}/pull/${{ github.event.number }}
|
||||
|
|
|
|||
23
.github/workflows/itest.yml
vendored
23
.github/workflows/itest.yml
vendored
|
|
@ -5,32 +5,41 @@ jobs:
|
|||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v2
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: 1.17
|
||||
go-version-file: "go.mod"
|
||||
check-latest: true
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 24
|
||||
|
||||
- name: Execute the scripts
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y make nginx fcgiwrap gnutls-bin
|
||||
cp -r $GITHUB_WORKSPACE ~
|
||||
cd ~
|
||||
cd csaf_distribution/docs/scripts/
|
||||
cd csaf/docs/scripts/
|
||||
# keep in sync with docs/scripts/Readme.md
|
||||
export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)"
|
||||
source ./TLSConfigsForITest.sh
|
||||
./TLSClientConfigsForITest.sh
|
||||
./setupProviderForITest.sh
|
||||
./testAggregator.sh
|
||||
./testDownloader.sh
|
||||
shell: bash
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: checker-results
|
||||
path: ~/checker-results.html
|
||||
path: |
|
||||
~/checker-results.html
|
||||
~/checker-results-no-clientcert.json
|
||||
if-no-files-found: error
|
||||
|
|
|
|||
18
.github/workflows/release.yml
vendored
18
.github/workflows/release.yml
vendored
|
|
@ -7,22 +7,26 @@ on:
|
|||
jobs:
|
||||
releases-matrix:
|
||||
name: Release Go binaries
|
||||
runs-on: ubuntu-latest
|
||||
# use oldest available ubuntu to be compatible with more libc.so revs.
|
||||
runs-on: ubuntu-22.04
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: '^1.18.2'
|
||||
go-version: '^1.24.9'
|
||||
check-latest: true
|
||||
|
||||
- name: Build
|
||||
run: make dist
|
||||
|
||||
- name: Upload release assets
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: |
|
||||
dist/csaf_distribution-*.zip
|
||||
dist/csaf_distribution-*.tar.gz
|
||||
dist/csaf-*.zip
|
||||
dist/csaf-*.tar.gz
|
||||
|
|
|
|||
22
.gitignore
vendored
Normal file
22
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# If you prefer the allow list template instead of the deny list, see community template:
|
||||
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
|
||||
#
|
||||
# Binaries for programs and plugins
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
bin-*/
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
vendor/
|
||||
|
||||
# Go workspace file
|
||||
go.work
|
||||
|
|
@ -21,3 +21,4 @@
|
|||
| github.com/gofrs/flock | BSD-3-Clause |
|
||||
| github.com/PuerkitoBio/goquery | BSD-3-Clause |
|
||||
| github.com/andybalholm/cascadia | BSD-2-Clause |
|
||||
| go.etcd.io/bbolt | MIT |
|
||||
|
|
|
|||
73
LICENSE-Apache-2.0.txt
Normal file
73
LICENSE-Apache-2.0.txt
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
73
LICENSES/Apache-2.0.txt
Normal file
73
LICENSES/Apache-2.0.txt
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
11
LICENSES/BSD-3-Clause.txt
Normal file
11
LICENSES/BSD-3-Clause.txt
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
Copyright (c) <year> <owner>.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
101
Makefile
101
Makefile
|
|
@ -1,26 +1,26 @@
|
|||
# This file is Free Software under the MIT License
|
||||
# without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
# This file is Free Software under the Apache-2.0 License
|
||||
# without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
# SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
# Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
#
|
||||
# Makefile to build csaf_distribution components
|
||||
# Makefile to build csaf components
|
||||
|
||||
SHELL = /bin/bash
|
||||
BUILD = go build
|
||||
MKDIR = mkdir -p
|
||||
|
||||
.PHONY: build build_linux build_win tag_checked_out mostlyclean
|
||||
.PHONY: build build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 tag_checked_out mostlyclean
|
||||
|
||||
all:
|
||||
@echo choose a target from: build build_linux build_win mostlyclean
|
||||
@echo choose a target from: build build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64 mostlyclean
|
||||
@echo prepend \`make BUILDTAG=1\` to checkout the highest git tag before building
|
||||
@echo or set BUILDTAG to a specific tag
|
||||
|
||||
# Build all binaries
|
||||
build: build_linux build_win
|
||||
build: build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64
|
||||
|
||||
# if BUILDTAG == 1 set it to the highest git tag
|
||||
ifeq ($(strip $(BUILDTAG)),1)
|
||||
|
|
@ -29,7 +29,7 @@ endif
|
|||
|
||||
ifdef BUILDTAG
|
||||
# add the git tag checkout to the requirements of our build targets
|
||||
build_linux build_win: tag_checked_out
|
||||
build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64: tag_checked_out
|
||||
endif
|
||||
|
||||
tag_checked_out:
|
||||
|
|
@ -41,48 +41,101 @@ tag_checked_out:
|
|||
# into a semver version. For this we increase the PATCH number, so that
|
||||
# any commit after a tag is considered newer than the semver from the tag
|
||||
# without an optional 'v'
|
||||
# Note we need `--tags` because github release only creates lightweight tags
|
||||
# Note we need `--tags` because github releases only create lightweight tags
|
||||
# (see feature request https://github.com/github/feedback/discussions/4924).
|
||||
# We use `--always` in case of being run as github action with shallow clone.
|
||||
GITDESC := $(shell git describe --tags --always)
|
||||
# In this case we might in some situations see an error like
|
||||
# `/bin/bash: line 1: 2b55bbb: value too great for base (error token is "2b55bbb")`
|
||||
# which can be ignored.
|
||||
GITDESC := $(shell git describe --tags --always --dirty=-modified 2>/dev/null || true)
|
||||
CURRENT_FOLDER_NAME := $(notdir $(CURDIR))
|
||||
ifeq ($(strip $(GITDESC)),)
|
||||
SEMVER := $(CURRENT_FOLDER_NAME)
|
||||
else
|
||||
GITDESCPATCH := $(shell echo '$(GITDESC)' | sed -E 's/v?[0-9]+\.[0-9]+\.([0-9]+)[-+]?.*/\1/')
|
||||
SEMVERPATCH := $(shell echo $$(( $(GITDESCPATCH) + 1 )))
|
||||
# Hint: The regexp in the next line only matches if there is a hyphen (`-`)
|
||||
# followed by a number, by which we assume that git describe
|
||||
# has added a string after the tag
|
||||
SEMVER := $(shell echo '$(GITDESC)' | sed -E 's/v?([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' )
|
||||
# Hint: The second regexp in the next line only matches
|
||||
# if there is a hyphen (`-`) followed by a number,
|
||||
# by which we assume that git describe has added a string after the tag
|
||||
SEMVER := $(shell echo '$(GITDESC)' | sed -E -e 's/^v//' -e 's/([0-9]+\.[0-9]+\.)([0-9]+)(-[1-9].*)/\1$(SEMVERPATCH)\3/' )
|
||||
endif
|
||||
testsemver:
|
||||
@echo from \'$(GITDESC)\' transformed to \'$(SEMVER)\'
|
||||
|
||||
|
||||
# Set -ldflags parameter to pass the semversion.
|
||||
LDFLAGS = -ldflags "-X github.com/csaf-poc/csaf_distribution/util.SemVersion=$(SEMVER)"
|
||||
LDFLAGS = -ldflags "-X github.com/gocsaf/csaf/v3/util.SemVersion=$(SEMVER)"
|
||||
|
||||
# Build binaries and place them under bin-$(GOOS)-$(GOARCH)
|
||||
# Using 'Target-specific Variable Values' to specify the build target system
|
||||
|
||||
GOARCH = amd64
|
||||
build_linux: GOOS = linux
|
||||
build_win: GOOS = windows
|
||||
build_linux: GOOS=linux
|
||||
build_linux: GOARCH=amd64
|
||||
|
||||
build_linux build_win:
|
||||
build_win: GOOS=windows
|
||||
build_win: GOARCH=amd64
|
||||
|
||||
build_mac_amd64: GOOS=darwin
|
||||
build_mac_amd64: GOARCH=amd64
|
||||
|
||||
build_mac_arm64: GOOS=darwin
|
||||
build_mac_arm64: GOARCH=arm64
|
||||
|
||||
build_linux_arm64: GOOS=linux
|
||||
build_linux_arm64: GOARCH=arm64
|
||||
|
||||
build_win_arm64: GOOS=windows
|
||||
build_win_arm64: GOARCH=arm64
|
||||
|
||||
build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64:
|
||||
$(eval BINDIR = bin-$(GOOS)-$(GOARCH)/ )
|
||||
$(MKDIR) $(BINDIR)
|
||||
env GOARCH=$(GOARCH) GOOS=$(GOOS) $(BUILD) -o $(BINDIR) $(LDFLAGS) -v ./cmd/...
|
||||
|
||||
|
||||
DISTDIR := csaf_distribution-$(SEMVER)
|
||||
dist: build_linux build_win
|
||||
DISTDIR := csaf-$(SEMVER)
|
||||
dist: build_linux build_linux_arm64 build_win build_win_arm64 build_mac_amd64 build_mac_arm64
|
||||
mkdir -p dist
|
||||
mkdir -p dist/$(DISTDIR)-windows-amd64/bin-windows-amd64
|
||||
mkdir -p dist/$(DISTDIR)-windows-arm64/bin-windows-arm64
|
||||
cp README.md dist/$(DISTDIR)-windows-amd64
|
||||
cp bin-windows-amd64/csaf_uploader.exe bin-windows-amd64/csaf_checker.exe dist/$(DISTDIR)-windows-amd64/bin-windows-amd64/
|
||||
cp README.md dist/$(DISTDIR)-windows-arm64
|
||||
cp bin-windows-amd64/csaf_uploader.exe bin-windows-amd64/csaf_validator.exe \
|
||||
bin-windows-amd64/csaf_checker.exe bin-windows-amd64/csaf_downloader.exe \
|
||||
dist/$(DISTDIR)-windows-amd64/bin-windows-amd64/
|
||||
cp bin-windows-arm64/csaf_uploader.exe bin-windows-arm64/csaf_validator.exe \
|
||||
bin-windows-arm64/csaf_checker.exe bin-windows-arm64/csaf_downloader.exe \
|
||||
dist/$(DISTDIR)-windows-arm64/bin-windows-arm64/
|
||||
mkdir -p dist/$(DISTDIR)-windows-amd64/docs
|
||||
cp docs/csaf_uploader.md docs/csaf_checker.md dist/$(DISTDIR)-windows-amd64/docs
|
||||
mkdir -p dist/$(DISTDIR)-windows-arm64/docs
|
||||
cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \
|
||||
docs/csaf_downloader.md dist/$(DISTDIR)-windows-amd64/docs
|
||||
cp docs/csaf_uploader.md docs/csaf_validator.md docs/csaf_checker.md \
|
||||
docs/csaf_downloader.md dist/$(DISTDIR)-windows-arm64/docs
|
||||
mkdir -p dist/$(DISTDIR)-macos/bin-darwin-amd64 \
|
||||
dist/$(DISTDIR)-macos/bin-darwin-arm64 \
|
||||
dist/$(DISTDIR)-macos/docs
|
||||
for f in csaf_downloader csaf_checker csaf_validator csaf_uploader ; do \
|
||||
cp bin-darwin-amd64/$$f dist/$(DISTDIR)-macos/bin-darwin-amd64 ; \
|
||||
cp bin-darwin-arm64/$$f dist/$(DISTDIR)-macos/bin-darwin-arm64 ; \
|
||||
cp docs/$${f}.md dist/$(DISTDIR)-macos/docs ; \
|
||||
done
|
||||
mkdir dist/$(DISTDIR)-gnulinux-amd64
|
||||
cp -r README.md docs bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64
|
||||
mkdir dist/$(DISTDIR)-gnulinux-arm64
|
||||
cp -r README.md bin-linux-amd64 dist/$(DISTDIR)-gnulinux-amd64
|
||||
cp -r README.md bin-linux-arm64 dist/$(DISTDIR)-gnulinux-arm64
|
||||
# adjust which docs to copy
|
||||
mkdir -p dist/tmp_docs
|
||||
cp -r docs/examples dist/tmp_docs
|
||||
cp docs/*.md dist/tmp_docs
|
||||
cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-amd64/docs
|
||||
cp -r dist/tmp_docs dist/$(DISTDIR)-gnulinux-arm64/docs
|
||||
rm -rf dist/tmp_docs
|
||||
cd dist/ ; zip -r $(DISTDIR)-windows-amd64.zip $(DISTDIR)-windows-amd64/
|
||||
cd dist/ ; zip -r $(DISTDIR)-windows-arm64.zip $(DISTDIR)-windows-arm64/
|
||||
cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-amd64.tar.gz $(DISTDIR)-gnulinux-amd64/
|
||||
cd dist/ ; tar -cvmlzf $(DISTDIR)-gnulinux-arm64.tar.gz $(DISTDIR)-gnulinux-arm64/
|
||||
cd dist/ ; tar -cvmlzf $(DISTDIR)-macos.tar.gz $(DISTDIR)-macos
|
||||
|
||||
# Remove bin-*-* and dist directories
|
||||
mostlyclean:
|
||||
|
|
|
|||
110
README.md
110
README.md
|
|
@ -1,53 +1,101 @@
|
|||
# csaf_distribution
|
||||
<!--
|
||||
This file is Free Software under the Apache-2.0 License
|
||||
without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
|
||||
An implementation of a [CSAF 2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/csd02/csaf-v2.0-csd02.html) trusted provider, checker and aggregator. Includes an uploader command line tool for the trusted provider.
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
Status: Beta (ready for more testing, but known short comings, see issues)
|
||||
SPDX-FileCopyrightText: 2024 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
Software-Engineering: 2024 Intevation GmbH <https://intevation.de>
|
||||
-->
|
||||
|
||||
|
||||
## [csaf_provider](docs/csaf_provider.md)
|
||||
|
||||
# csaf
|
||||
|
||||
Implements a [CSAF](https://oasis-open.github.io/csaf-documentation/)
|
||||
([specification v2.0](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html)
|
||||
and its [errata](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html))
|
||||
trusted provider, checker, aggregator and downloader.
|
||||
Includes an uploader command line tool for the trusted provider.
|
||||
|
||||
## Tools for users
|
||||
### [csaf_downloader](docs/csaf_downloader.md)
|
||||
is a tool for downloading advisories from a provider.
|
||||
Can be used for automated forwarding of CSAF documents.
|
||||
|
||||
### [csaf_validator](docs/csaf_validator.md)
|
||||
is a tool to validate local advisories files against the JSON Schema and an optional remote validator.
|
||||
|
||||
## Tools for advisory providers
|
||||
|
||||
### [csaf_provider](docs/csaf_provider.md)
|
||||
is an implementation of the role CSAF Trusted Provider, also offering
|
||||
a simple HTTPS based management service.
|
||||
|
||||
## [csaf_uploader](docs/csaf_uploader.md)
|
||||
is a command line tool that uploads CSAF documents to the `csaf_provider`.
|
||||
### [csaf_uploader](docs/csaf_uploader.md)
|
||||
is a command line tool to upload CSAF documents to the `csaf_provider`.
|
||||
|
||||
## [csaf_aggregator](docs/csaf_aggregator.md)
|
||||
is an implementation of the role CSAF Aggregator.
|
||||
|
||||
## [csaf_checker](docs/csaf_checker.md)
|
||||
### [csaf_checker](docs/csaf_checker.md)
|
||||
is a tool for testing a CSAF Trusted Provider according to [Section 7 of the CSAF standard](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.html#7-distributing-csaf-documents).
|
||||
|
||||
## Setup
|
||||
Note that the server side is only tested
|
||||
and the binaries available for GNU/Linux-Systems, e.g. Ubuntu LTS.
|
||||
It is likely to run on similar systems when build from sources.
|
||||
### [csaf_aggregator](docs/csaf_aggregator.md)
|
||||
is a CSAF Aggregator, to list or mirror providers.
|
||||
|
||||
|
||||
## Use as go library
|
||||
|
||||
The modules of this repository can be used as library by other Go applications. [ISDuBA](https://github.com/ISDuBA/ISDuBA) does so, for example.
|
||||
But there is only limited support and thus it is _not officially supported_.
|
||||
There are plans to change this without a concrete schedule within a future major release, e.g. see [#367](https://github.com/gocsaf/csaf/issues/367).
|
||||
|
||||
Initially envisioned as a toolbox, it was not constructed as a library,
|
||||
and to name one issue, exposes too many functions.
|
||||
This leads to problems like [#634](https://github.com/gocsaf/csaf/issues/634), where we have to accept that with 3.2.0 there was an unintended API change.
|
||||
|
||||
### [examples](./examples/README.md)
|
||||
are small examples of how to use `github.com/gocsaf/csaf` as an API. Currently this is a work in progress.
|
||||
|
||||
|
||||
## Setup
|
||||
Binaries for the server side are only available and tested
|
||||
for GNU/Linux-Systems, e.g. Ubuntu LTS.
|
||||
They are likely to run on similar systems when build from sources.
|
||||
|
||||
The windows binary package only includes
|
||||
`csaf_downloader`, `csaf_validator`, `csaf_checker` and `csaf_uploader`.
|
||||
|
||||
The MacOS binary archives come with the same set of client tools
|
||||
and are _community supported_. Which means:
|
||||
while they are expected to run fine,
|
||||
they are not at the same level of testing and maintenance
|
||||
as the Windows and GNU/Linux binaries.
|
||||
|
||||
The windows binaries only include `csaf_uploader` and `csaf_checker`.
|
||||
|
||||
### Prebuild binaries
|
||||
|
||||
Download the binaries (from the most recent release assets on Github).
|
||||
Download the binaries from the most recent release assets on Github.
|
||||
|
||||
|
||||
### Build from sources
|
||||
|
||||
- A recent version of **Go** (1.17+) should be installed. [Go installation](https://go.dev/doc/install)
|
||||
- Needs a [supported version](docs/Development.md) of **Go** to be installed.
|
||||
[Go installation](https://go.dev/doc/install)
|
||||
|
||||
- Clone the repository `git clone https://github.com/csaf-poc/csaf_distribution.git `
|
||||
- Clone the repository `git clone https://github.com/gocsaf/csaf.git `
|
||||
|
||||
- Build Go components Makefile supplies the following targets:
|
||||
- Build For GNU/Linux System: `make build_linux`
|
||||
- Build For Windows System (cross build): `make build_win`
|
||||
- Build For both linux and windows: `make build`
|
||||
- Build from a specific github tag by passing the intended tag to the `BUILDTAG` variable.
|
||||
- Build for GNU/Linux system: `make build_linux`
|
||||
- Build for Windows system (cross build): `make build_win`
|
||||
- Build for macOS system on Intel Processor (AMD64) (cross build): `make build_mac_amd64`
|
||||
- Build for macOS system on Apple Silicon (ARM64) (cross build): `make build_mac_arm64`
|
||||
- Build For GNU/Linux, macOS and Windows: `make build`
|
||||
- Build from a specific git tag by passing the intended tag to the `BUILDTAG` variable.
|
||||
E.g. `make BUILDTAG=v1.0.0 build` or `make BUILDTAG=1 build_linux`.
|
||||
The special value `1` means checking out the highest github tag for the build.
|
||||
The special value `1` means checking out the highest git tag for the build.
|
||||
- Remove the generated binaries und their directories: `make mostlyclean`
|
||||
|
||||
Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-windows-amd64/`.
|
||||
|
||||
|
||||
### Setup (Trusted Provider)
|
||||
|
||||
- [Install](https://nginx.org/en/docs/install.html) **nginx**
|
||||
|
|
@ -55,10 +103,22 @@ Binaries will be placed in directories named like `bin-linux-amd64/` and `bin-wi
|
|||
- To configure nginx see [docs/provider-setup.md](docs/provider-setup.md)
|
||||
- To configure nginx for client certificate authentication see [docs/client-certificate-setup.md](docs/client-certificate-setup.md)
|
||||
|
||||
### Development
|
||||
|
||||
For further details of the development process consult our [development page](./docs/Development.md).
|
||||
|
||||
## Previous repo URLs
|
||||
|
||||
> [!NOTE]
|
||||
> To avoid future breakage, if you have `csaf-poc` in some of your URLs:
|
||||
> 1. Adjust your HTML links.
|
||||
> 2. Adjust your go module paths, see [#579](https://github.com/gocsaf/csaf/issues/579#issuecomment-2497244379).
|
||||
>
|
||||
> (This repository was moved here from https://github.com/csaf-poc/csaf_distribution on 2024-10-28. The old one is deprecated and redirection will be switched off sometime in 2025.)
|
||||
|
||||
## License
|
||||
|
||||
- csaf_distribution is licensed as Free Software under MIT License.
|
||||
- `csaf` is licensed as Free Software under the terms of the [Apache License, Version 2.0](./LICENSES/Apache-2.0.txt).
|
||||
|
||||
- See the specific source files
|
||||
for details, the license itself can be found in the directory `LICENSES/`.
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -10,23 +10,25 @@ package main
|
|||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
var errNotFound = errors.New("not found")
|
||||
|
||||
func downloadJSON(c util.Client, url string, found func(io.Reader) error) error {
|
||||
res, err := c.Get(url)
|
||||
if err != nil || res.StatusCode != http.StatusOK ||
|
||||
if err != nil {
|
||||
return fmt.Errorf("not found: %w", err)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusOK ||
|
||||
res.Header.Get("Content-Type") != "application/json" {
|
||||
// ignore this as it is expected.
|
||||
return errNotFound
|
||||
}
|
||||
return func() error {
|
||||
defer res.Body.Close()
|
||||
return found(res.Body)
|
||||
}()
|
||||
}
|
||||
|
|
|
|||
67
cmd/csaf_aggregator/client_test.go
Normal file
67
cmd/csaf_aggregator/client_test.go
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func Test_downloadJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
statusCode int
|
||||
contentType string
|
||||
wantErr error
|
||||
}{
|
||||
{
|
||||
name: "status ok, application/json",
|
||||
statusCode: http.StatusOK,
|
||||
contentType: "application/json",
|
||||
wantErr: nil,
|
||||
},
|
||||
{
|
||||
name: "status found, application/json",
|
||||
statusCode: http.StatusFound,
|
||||
contentType: "application/json",
|
||||
wantErr: errNotFound,
|
||||
},
|
||||
{
|
||||
name: "status ok, application/xml",
|
||||
statusCode: http.StatusOK,
|
||||
contentType: "application/xml",
|
||||
wantErr: errNotFound,
|
||||
},
|
||||
}
|
||||
|
||||
t.Parallel()
|
||||
for _, testToRun := range tests {
|
||||
test := testToRun
|
||||
t.Run(test.name, func(tt *testing.T) {
|
||||
tt.Parallel()
|
||||
found := func(_ io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
|
||||
w.Header().Add("Content-Type", test.contentType)
|
||||
w.WriteHeader(test.statusCode)
|
||||
}))
|
||||
defer server.Close()
|
||||
hClient := http.Client{}
|
||||
client := util.Client(&hClient)
|
||||
if gotErr := downloadJSON(client, server.URL, found); gotErr != test.wantErr {
|
||||
t.Errorf("downloadJSON: Expected %q but got %q.", test.wantErr, gotErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -12,24 +12,30 @@ import (
|
|||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"runtime"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/certs"
|
||||
"github.com/gocsaf/csaf/v3/internal/filter"
|
||||
"github.com/gocsaf/csaf/v3/internal/models"
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
"golang.org/x/time/rate"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultConfigPath = "aggregator.toml"
|
||||
defaultWorkers = 10
|
||||
defaultFolder = "/var/www"
|
||||
defaultWeb = "/var/www/html"
|
||||
defaultDomain = "https://example.com"
|
||||
defaultUpdateInterval = "on best effort"
|
||||
defaultLockFile = "/var/lock/csaf_aggregator/lock"
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
|
|
@ -38,6 +44,29 @@ type provider struct {
|
|||
// Rate gives the provider specific rate limiting (see overall Rate).
|
||||
Rate *float64 `toml:"rate"`
|
||||
Insecure *bool `toml:"insecure"`
|
||||
WriteIndices *bool `toml:"write_indices"`
|
||||
Categories *[]string `toml:"categories"`
|
||||
// ServiceDocument incidates if we should create a service.json document.
|
||||
ServiceDocument *bool `toml:"create_service_document"`
|
||||
AggregatoryCategory *csaf.AggregatorCategory `toml:"category"`
|
||||
|
||||
// UpdateInterval is as the mandatory `update_interval` if this is a publisher.
|
||||
UpdateInterval *string `toml:"update_interval"`
|
||||
|
||||
// IgnorePattern is a list of patterns of advisory URLs to be ignored.
|
||||
IgnorePattern []string `toml:"ignore_pattern"`
|
||||
|
||||
// ExtraHeader adds extra HTTP header fields to client
|
||||
ExtraHeader http.Header `toml:"header"`
|
||||
|
||||
ClientCert *string `toml:"client_cert"`
|
||||
ClientKey *string `toml:"client_key"`
|
||||
ClientPassphrase *string `toml:"client_passphrase"`
|
||||
|
||||
Range *models.TimeRange `toml:"time_range"`
|
||||
|
||||
clientCerts []tls.Certificate
|
||||
ignorePattern filter.PatternMatcher
|
||||
}
|
||||
|
||||
type config struct {
|
||||
|
|
@ -50,6 +79,8 @@ type config struct {
|
|||
// Rate gives the average upper limit of https operations per second.
|
||||
Rate *float64 `toml:"rate"`
|
||||
Insecure *bool `toml:"insecure"`
|
||||
Categories *[]string `toml:"categories"`
|
||||
WriteIndices bool `toml:"write_indices"`
|
||||
Aggregator csaf.AggregatorInfo `toml:"aggregator"`
|
||||
Providers []*provider `toml:"providers"`
|
||||
OpenPGPPrivateKey string `toml:"openpgp_private_key"`
|
||||
|
|
@ -57,19 +88,156 @@ type config struct {
|
|||
Passphrase *string `toml:"passphrase"`
|
||||
AllowSingleProvider bool `toml:"allow_single_provider"`
|
||||
|
||||
ClientCert *string `toml:"client_cert"`
|
||||
ClientKey *string `toml:"client_key"`
|
||||
ClientPassphrase *string `toml:"client_passphrase"`
|
||||
|
||||
Range *models.TimeRange `long:"time_range" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"time_range"`
|
||||
|
||||
// LockFile tries to lock to a given file.
|
||||
LockFile *string `toml:"lock_file"`
|
||||
|
||||
// Interim performs an interim scan.
|
||||
Interim bool `toml:"interim"`
|
||||
Interim bool `short:"i" long:"interim" description:"Perform an interim scan" toml:"interim"`
|
||||
Version bool `long:"version" description:"Display version of the binary" toml:"-"`
|
||||
|
||||
// InterimYears is numbers numbers of years to look back
|
||||
// for interim advisories. Less/equal zero means forever.
|
||||
InterimYears int `toml:"interim_years"`
|
||||
|
||||
// RemoteValidator configures an optional remote validation.
|
||||
RemoteValidatorOptions *csaf.RemoteValidatorOptions `toml:"remote_validator"`
|
||||
|
||||
// ServiceDocument incidates if we should create a service.json document.
|
||||
ServiceDocument bool `toml:"create_service_document"`
|
||||
|
||||
// UpdateInterval is used for publishers as the mandatory field
|
||||
// 'update_interval'.
|
||||
UpdateInterval *string `toml:"update_interval"`
|
||||
|
||||
// IgnorePattern is a list of patterns of advisory URLs to be ignored.
|
||||
IgnorePattern []string `toml:"ignore_pattern"`
|
||||
|
||||
// ExtraHeader adds extra HTTP header fields to client
|
||||
ExtraHeader http.Header `toml:"header"`
|
||||
|
||||
Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"`
|
||||
|
||||
keyMu sync.Mutex
|
||||
key *crypto.Key
|
||||
keyErr error
|
||||
|
||||
clientCerts []tls.Certificate
|
||||
ignorePattern filter.PatternMatcher
|
||||
}
|
||||
|
||||
// configPaths are the potential file locations of the config file.
|
||||
var configPaths = []string{
|
||||
"~/.config/csaf/aggregator.toml",
|
||||
"~/.csaf_aggregator.toml",
|
||||
"csaf_aggregator.toml",
|
||||
}
|
||||
|
||||
// parseArgsConfig parse the command arguments and loads configuration
|
||||
// from a configuration file.
|
||||
func parseArgsConfig() ([]string, *config, error) {
|
||||
p := options.Parser[config]{
|
||||
DefaultConfigLocations: configPaths,
|
||||
ConfigLocation: func(cfg *config) string {
|
||||
return cfg.Config
|
||||
},
|
||||
HasVersion: func(cfg *config) bool { return cfg.Version },
|
||||
// Establish default values if not set.
|
||||
EnsureDefaults: (*config).setDefaults,
|
||||
}
|
||||
return p.Parse()
|
||||
}
|
||||
|
||||
// tooOldForInterims returns a function that tells if a given
|
||||
// time is too old for the configured interims interval.
|
||||
func (c *config) tooOldForInterims() func(time.Time) bool {
|
||||
if c.InterimYears <= 0 {
|
||||
return func(time.Time) bool { return false }
|
||||
}
|
||||
from := time.Now().AddDate(-c.InterimYears, 0, 0)
|
||||
return func(t time.Time) bool { return t.Before(from) }
|
||||
}
|
||||
|
||||
// ageAccept returns a function which checks if a given time
|
||||
// is in the accepted download interval of the provider or
|
||||
// the global config.
|
||||
func (p *provider) ageAccept(c *config) func(time.Time) bool {
|
||||
var r *models.TimeRange
|
||||
switch {
|
||||
case p.Range != nil:
|
||||
r = p.Range
|
||||
case c.Range != nil:
|
||||
r = c.Range
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
if c.Verbose {
|
||||
slog.Debug(
|
||||
"Setting up filter to accept advisories within time range",
|
||||
"from", r[0].Format(time.RFC3339),
|
||||
"to", r[1].Format(time.RFC3339),
|
||||
)
|
||||
}
|
||||
return r.Contains
|
||||
}
|
||||
|
||||
// ignoreFile returns true if the given URL should not be downloaded.
|
||||
func (p *provider) ignoreURL(u string, c *config) bool {
|
||||
return p.ignorePattern.Matches(u) || c.ignorePattern.Matches(u)
|
||||
}
|
||||
|
||||
// updateInterval returns the update interval of a publisher.
|
||||
func (p *provider) updateInterval(c *config) string {
|
||||
if p.UpdateInterval != nil {
|
||||
return *p.UpdateInterval
|
||||
}
|
||||
if c.UpdateInterval != nil {
|
||||
return *c.UpdateInterval
|
||||
}
|
||||
return defaultUpdateInterval
|
||||
}
|
||||
|
||||
// serviceDocument tells if we should generate a service document for a
|
||||
// given provider.
|
||||
func (p *provider) serviceDocument(c *config) bool {
|
||||
if p.ServiceDocument != nil {
|
||||
return *p.ServiceDocument
|
||||
}
|
||||
return c.ServiceDocument
|
||||
}
|
||||
|
||||
// writeIndices tells if we should write index.txt and changes.csv.
|
||||
func (p *provider) writeIndices(c *config) bool {
|
||||
if p.WriteIndices != nil {
|
||||
return *p.WriteIndices
|
||||
}
|
||||
return c.WriteIndices
|
||||
}
|
||||
|
||||
func (p *provider) runAsMirror(c *config) bool {
|
||||
if p.AggregatoryCategory != nil {
|
||||
return *p.AggregatoryCategory == csaf.AggregatorAggregator
|
||||
}
|
||||
return c.runAsMirror()
|
||||
}
|
||||
|
||||
// atLeastNMirrors checks if there are at least n mirrors configured.
|
||||
func (c *config) atLeastNMirrors(n int) bool {
|
||||
var mirrors int
|
||||
for _, p := range c.Providers {
|
||||
if p.runAsMirror(c) {
|
||||
if mirrors++; mirrors >= n {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// runAsMirror determines if the aggregator should run in mirror mode.
|
||||
|
|
@ -96,23 +264,62 @@ func (c *config) privateOpenPGPKey() (*crypto.Key, error) {
|
|||
return c.key, c.keyErr
|
||||
}
|
||||
|
||||
// httpLog does structured logging in a [util.LoggingClient].
|
||||
func httpLog(method, url string) {
|
||||
slog.Debug("http",
|
||||
"method", method,
|
||||
"url", url)
|
||||
}
|
||||
|
||||
func (c *config) httpClient(p *provider) util.Client {
|
||||
|
||||
hClient := http.Client{}
|
||||
|
||||
var tlsConfig tls.Config
|
||||
if p.Insecure != nil && *p.Insecure || c.Insecure != nil && *c.Insecure {
|
||||
hClient.Transport = &http.Transport{
|
||||
TLSClientConfig: &tls.Config{
|
||||
InsecureSkipVerify: true,
|
||||
},
|
||||
}
|
||||
tlsConfig.InsecureSkipVerify = true
|
||||
}
|
||||
|
||||
var client util.Client
|
||||
// Use client certs if needed.
|
||||
switch {
|
||||
// Provider has precedence over global.
|
||||
case len(p.clientCerts) != 0:
|
||||
tlsConfig.Certificates = p.clientCerts
|
||||
case len(c.clientCerts) != 0:
|
||||
tlsConfig.Certificates = c.clientCerts
|
||||
}
|
||||
|
||||
hClient.Transport = &http.Transport{
|
||||
TLSClientConfig: &tlsConfig,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
client := util.Client(&hClient)
|
||||
|
||||
// Add extra headers.
|
||||
switch {
|
||||
// Provider has precedence over global.
|
||||
case len(p.ExtraHeader) > 0:
|
||||
client = &util.HeaderClient{
|
||||
Client: client,
|
||||
Header: p.ExtraHeader,
|
||||
}
|
||||
case len(c.ExtraHeader) > 0:
|
||||
client = &util.HeaderClient{
|
||||
Client: client,
|
||||
Header: c.ExtraHeader,
|
||||
}
|
||||
default:
|
||||
client = &util.HeaderClient{
|
||||
Client: client,
|
||||
Header: http.Header{},
|
||||
}
|
||||
}
|
||||
|
||||
if c.Verbose {
|
||||
client = &util.LoggingClient{Client: &hClient}
|
||||
} else {
|
||||
client = &hClient
|
||||
client = &util.LoggingClient{
|
||||
Client: client,
|
||||
Log: httpLog,
|
||||
}
|
||||
}
|
||||
|
||||
if p.Rate == nil && c.Rate == nil {
|
||||
|
|
@ -133,12 +340,11 @@ func (c *config) httpClient(p *provider) util.Client {
|
|||
}
|
||||
|
||||
func (c *config) checkProviders() error {
|
||||
|
||||
if !c.AllowSingleProvider && len(c.Providers) < 2 {
|
||||
return errors.New("need at least two providers")
|
||||
}
|
||||
|
||||
already := make(map[string]bool)
|
||||
already := util.Set[string]{}
|
||||
|
||||
for _, p := range c.Providers {
|
||||
if p.Name == "" {
|
||||
|
|
@ -147,14 +353,28 @@ func (c *config) checkProviders() error {
|
|||
if p.Domain == "" {
|
||||
return errors.New("no domain given for provider")
|
||||
}
|
||||
if already[p.Name] {
|
||||
if already.Contains(p.Name) {
|
||||
return fmt.Errorf("provider '%s' is configured more than once", p.Name)
|
||||
}
|
||||
already[p.Name] = true
|
||||
already.Add(p.Name)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *config) checkMirror() error {
|
||||
if c.runAsMirror() {
|
||||
if !c.AllowSingleProvider && !c.atLeastNMirrors(2) {
|
||||
return errors.New("at least 2 providers need to be mirrored")
|
||||
} else if c.AllowSingleProvider && !c.atLeastNMirrors(1) {
|
||||
return errors.New("at least one provider must be mirrored")
|
||||
}
|
||||
} else if !c.AllowSingleProvider && c.atLeastNMirrors(1) {
|
||||
return errors.New("found mirrors in a lister aggregator")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *config) setDefaults() {
|
||||
if c.Folder == "" {
|
||||
c.Folder = defaultFolder
|
||||
|
|
@ -168,6 +388,14 @@ func (c *config) setDefaults() {
|
|||
c.Domain = defaultDomain
|
||||
}
|
||||
|
||||
switch {
|
||||
case c.LockFile == nil:
|
||||
lockFile := defaultLockFile
|
||||
c.LockFile = &lockFile
|
||||
case *c.LockFile == "":
|
||||
c.LockFile = nil
|
||||
}
|
||||
|
||||
if c.Workers <= 0 {
|
||||
if n := runtime.NumCPU(); n > defaultWorkers {
|
||||
c.Workers = defaultWorkers
|
||||
|
|
@ -181,33 +409,90 @@ func (c *config) setDefaults() {
|
|||
}
|
||||
}
|
||||
|
||||
func (c *config) check() error {
|
||||
// prepareLogging sets up the structured logging.
|
||||
func (c *config) prepareLogging() error {
|
||||
ho := slog.HandlerOptions{
|
||||
Level: slog.LevelDebug,
|
||||
}
|
||||
handler := slog.NewTextHandler(os.Stdout, &ho)
|
||||
logger := slog.New(handler)
|
||||
slog.SetDefault(logger)
|
||||
return nil
|
||||
}
|
||||
|
||||
// compileIgnorePatterns compiles the configured patterns to be ignored.
|
||||
func (p *provider) compileIgnorePatterns() error {
|
||||
pm, err := filter.NewPatternMatcher(p.IgnorePattern)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid ignore patterns for %q: %w", p.Name, err)
|
||||
}
|
||||
p.ignorePattern = pm
|
||||
return nil
|
||||
}
|
||||
|
||||
// compileIgnorePatterns compiles the configured patterns to be ignored.
|
||||
func (c *config) compileIgnorePatterns() error {
|
||||
// Compile the top level patterns.
|
||||
pm, err := filter.NewPatternMatcher(c.IgnorePattern)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.ignorePattern = pm
|
||||
// Compile the patterns of the providers.
|
||||
for _, p := range c.Providers {
|
||||
if err := p.compileIgnorePatterns(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepareCertificates loads the provider specific client side certificates
|
||||
// used by the HTTP client.
|
||||
func (p *provider) prepareCertificates() error {
|
||||
cert, err := certs.LoadCertificate(
|
||||
p.ClientCert, p.ClientKey, p.ClientPassphrase)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid certificates for %q: %w", p.Name, err)
|
||||
}
|
||||
p.clientCerts = cert
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepareCertificates loads the client side certificates used by the HTTP client.
|
||||
func (c *config) prepareCertificates() error {
|
||||
// Global certificates
|
||||
cert, err := certs.LoadCertificate(
|
||||
c.ClientCert, c.ClientKey, c.ClientPassphrase)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.clientCerts = cert
|
||||
// Provider certificates
|
||||
for _, p := range c.Providers {
|
||||
if err := p.prepareCertificates(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepare prepares internal state of a loaded configuration.
|
||||
func (c *config) prepare() error {
|
||||
if len(c.Providers) == 0 {
|
||||
return errors.New("no providers given in configuration")
|
||||
}
|
||||
|
||||
if err := c.Aggregator.Validate(); err != nil {
|
||||
for _, prepare := range []func() error{
|
||||
c.prepareCertificates,
|
||||
c.compileIgnorePatterns,
|
||||
c.Aggregator.Validate,
|
||||
c.checkProviders,
|
||||
c.checkMirror,
|
||||
} {
|
||||
if err := prepare(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.checkProviders()
|
||||
}
|
||||
|
||||
func loadConfig(path string) (*config, error) {
|
||||
if path == "" {
|
||||
path = defaultConfigPath
|
||||
}
|
||||
|
||||
var cfg config
|
||||
if _, err := toml.DecodeFile(path, &cfg); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cfg.setDefaults()
|
||||
|
||||
if err := cfg.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &cfg, nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +0,0 @@
|
|||
// csaf_aggregator is an implementation of the role CSAF Aggregator of the
|
||||
// CSAF 2.0 specification
|
||||
// (https://docs.oasis-open.org/csaf/csaf/v2.0/csd02/csaf-v2.0-csd02.html)
|
||||
//
|
||||
// TODO: To be called periodically, e.g with cron
|
||||
|
||||
package main
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -11,32 +11,36 @@ package main
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
type fullJob struct {
|
||||
provider *provider
|
||||
aggregatorProvider *csaf.AggregatorCSAFProvider
|
||||
work fullWorkFunc
|
||||
err error
|
||||
}
|
||||
|
||||
// setupProviderFull fetches the provider-metadate.json for a specific provider.
|
||||
// setupProviderFull fetches the provider-metadata.json for a specific provider.
|
||||
func (w *worker) setupProviderFull(provider *provider) error {
|
||||
log.Printf("worker #%d: %s (%s)\n",
|
||||
w.num, provider.Name, provider.Domain)
|
||||
|
||||
w.log.Info("Setting up provider",
|
||||
"provider", slog.GroupValue(
|
||||
slog.String("name", provider.Name),
|
||||
slog.String("domain", provider.Domain),
|
||||
))
|
||||
w.dir = ""
|
||||
w.provider = provider
|
||||
|
||||
// Each job needs a separate client.
|
||||
w.client = w.cfg.httpClient(provider)
|
||||
w.client = w.processor.cfg.httpClient(provider)
|
||||
|
||||
// We need the provider metadata in all cases.
|
||||
if err := w.locateProviderMetadata(provider.Domain); err != nil {
|
||||
|
|
@ -53,7 +57,7 @@ func (w *worker) setupProviderFull(provider *provider) error {
|
|||
"provider-metadata.json has %d validation issues", len(errors))
|
||||
}
|
||||
|
||||
log.Printf("provider-metadata: %s\n", w.loc)
|
||||
w.log.Info("Using provider-metadata", "url", w.loc)
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
@ -61,11 +65,7 @@ func (w *worker) setupProviderFull(provider *provider) error {
|
|||
type fullWorkFunc func(*worker) (*csaf.AggregatorCSAFProvider, error)
|
||||
|
||||
// fullWork handles the treatment of providers concurrently.
|
||||
func (w *worker) fullWork(
|
||||
wg *sync.WaitGroup,
|
||||
doWork fullWorkFunc,
|
||||
jobs <-chan *fullJob,
|
||||
) {
|
||||
func (w *worker) fullWork(wg *sync.WaitGroup, jobs <-chan *fullJob) {
|
||||
defer wg.Done()
|
||||
|
||||
for j := range jobs {
|
||||
|
|
@ -73,37 +73,59 @@ func (w *worker) fullWork(
|
|||
j.err = err
|
||||
continue
|
||||
}
|
||||
j.aggregatorProvider, j.err = doWork(w)
|
||||
j.aggregatorProvider, j.err = j.work(w)
|
||||
}
|
||||
}
|
||||
|
||||
// full performs the complete lister/download
|
||||
func (p *processor) full() error {
|
||||
|
||||
var doWork fullWorkFunc
|
||||
|
||||
if p.cfg.runAsMirror() {
|
||||
doWork = (*worker).mirror
|
||||
log.Println("Running in aggregator mode")
|
||||
p.log.Info("Running in aggregator mode")
|
||||
|
||||
// check if we need to setup a remote validator
|
||||
if p.cfg.RemoteValidatorOptions != nil {
|
||||
validator, err := p.cfg.RemoteValidatorOptions.Open()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Not sure if we really need it to be serialized.
|
||||
p.remoteValidator = csaf.SynchronizedRemoteValidator(validator)
|
||||
defer func() {
|
||||
p.remoteValidator.Close()
|
||||
p.remoteValidator = nil
|
||||
}()
|
||||
}
|
||||
} else {
|
||||
doWork = (*worker).lister
|
||||
log.Println("Running in lister mode")
|
||||
p.log.Info("Running in lister mode")
|
||||
}
|
||||
|
||||
queue := make(chan *fullJob)
|
||||
var wg sync.WaitGroup
|
||||
|
||||
log.Printf("Starting %d workers.\n", p.cfg.Workers)
|
||||
p.log.Info("Starting workers...", "num", p.cfg.Workers)
|
||||
|
||||
for i := 1; i <= p.cfg.Workers; i++ {
|
||||
wg.Add(1)
|
||||
w := newWorker(i, p.cfg)
|
||||
go w.fullWork(&wg, doWork, queue)
|
||||
w := newWorker(i, p)
|
||||
|
||||
go w.fullWork(&wg, queue)
|
||||
}
|
||||
|
||||
jobs := make([]fullJob, len(p.cfg.Providers))
|
||||
|
||||
for i, p := range p.cfg.Providers {
|
||||
jobs[i] = fullJob{provider: p}
|
||||
for i, provider := range p.cfg.Providers {
|
||||
var work fullWorkFunc
|
||||
if provider.runAsMirror(p.cfg) {
|
||||
work = (*worker).mirror
|
||||
} else {
|
||||
work = (*worker).lister
|
||||
}
|
||||
jobs[i] = fullJob{
|
||||
provider: provider,
|
||||
work: work,
|
||||
}
|
||||
queue <- &jobs[i]
|
||||
}
|
||||
close(queue)
|
||||
|
|
@ -111,24 +133,45 @@ func (p *processor) full() error {
|
|||
wg.Wait()
|
||||
|
||||
// Assemble aggregator data structure.
|
||||
|
||||
csafProviders := make([]*csaf.AggregatorCSAFProvider, 0, len(jobs))
|
||||
var providers []*csaf.AggregatorCSAFProvider
|
||||
var publishers []*csaf.AggregatorCSAFPublisher
|
||||
|
||||
for i := range jobs {
|
||||
j := &jobs[i]
|
||||
if j.err != nil {
|
||||
log.Printf("error: '%s' failed: %v\n", j.provider.Name, j.err)
|
||||
p.log.Error("Job execution failed",
|
||||
slog.Group("job",
|
||||
slog.Group("provider"),
|
||||
"name", j.provider.Name,
|
||||
),
|
||||
"err", j.err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if j.aggregatorProvider == nil {
|
||||
log.Printf(
|
||||
"error: '%s' does not produce any result.\n", j.provider.Name)
|
||||
p.log.Error("Job did not produce any result",
|
||||
slog.Group("job",
|
||||
slog.Group("provider"),
|
||||
"name", j.provider.Name,
|
||||
),
|
||||
)
|
||||
continue
|
||||
}
|
||||
csafProviders = append(csafProviders, j.aggregatorProvider)
|
||||
|
||||
// "https://" signals a publisher.
|
||||
if strings.HasPrefix(j.provider.Domain, "https://") {
|
||||
pub := &csaf.AggregatorCSAFPublisher{
|
||||
Metadata: j.aggregatorProvider.Metadata,
|
||||
Mirrors: j.aggregatorProvider.Mirrors,
|
||||
UpdateInterval: j.provider.updateInterval(p.cfg),
|
||||
}
|
||||
publishers = append(publishers, pub)
|
||||
} else {
|
||||
providers = append(providers, j.aggregatorProvider)
|
||||
}
|
||||
}
|
||||
|
||||
if len(csafProviders) == 0 {
|
||||
if len(providers)+len(publishers) == 0 {
|
||||
return errors.New("all jobs failed, stopping")
|
||||
}
|
||||
|
||||
|
|
@ -136,13 +179,14 @@ func (p *processor) full() error {
|
|||
canonicalURL := csaf.AggregatorURL(
|
||||
p.cfg.Domain + "/.well-known/csaf-aggregator/aggregator.json")
|
||||
|
||||
lastUpdated := csaf.TimeStamp(time.Now())
|
||||
lastUpdated := csaf.TimeStamp(time.Now().UTC())
|
||||
|
||||
agg := csaf.Aggregator{
|
||||
Aggregator: &p.cfg.Aggregator,
|
||||
Version: &version,
|
||||
CanonicalURL: &canonicalURL,
|
||||
CSAFProviders: csafProviders,
|
||||
CSAFProviders: providers,
|
||||
CSAFPublishers: publishers,
|
||||
LastUpdated: &lastUpdated,
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -12,7 +12,6 @@ import (
|
|||
"bufio"
|
||||
"encoding/csv"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
|
|
@ -20,8 +19,23 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
const (
|
||||
// interimsCSV is the name of the file to store the URLs
|
||||
// of the interim advisories.
|
||||
interimsCSV = "interims.csv"
|
||||
|
||||
// changesCSV is the name of the file to store the
|
||||
// the paths to the advisories sorted in descending order
|
||||
// of the release date along with the release date.
|
||||
changesCSV = "changes.csv"
|
||||
|
||||
// indexTXT is the name of the file to store the
|
||||
// the paths of the advisories.
|
||||
indexTXT = "index.txt"
|
||||
)
|
||||
|
||||
func (w *worker) writeInterims(label string, summaries []summary) error {
|
||||
|
|
@ -44,7 +58,7 @@ func (w *worker) writeInterims(label string, summaries []summary) error {
|
|||
ss[j].summary.CurrentReleaseDate)
|
||||
})
|
||||
|
||||
fname := filepath.Join(w.dir, label, "interim.csv")
|
||||
fname := filepath.Join(w.dir, label, interimsCSV)
|
||||
f, err := os.Create(fname)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -76,6 +90,22 @@ func (w *worker) writeInterims(label string, summaries []summary) error {
|
|||
|
||||
func (w *worker) writeCSV(label string, summaries []summary) error {
|
||||
|
||||
fname := filepath.Join(w.dir, label, changesCSV)
|
||||
|
||||
// If we don't have any entries remove existing file.
|
||||
if len(summaries) == 0 {
|
||||
// Does it really exist?
|
||||
if err := os.RemoveAll(fname); err != nil {
|
||||
return fmt.Errorf("unable to remove %q: %w", fname, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
f, err := os.Create(fname)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Do not sort in-place.
|
||||
ss := make([]summary, len(summaries))
|
||||
copy(ss, summaries)
|
||||
|
|
@ -85,21 +115,21 @@ func (w *worker) writeCSV(label string, summaries []summary) error {
|
|||
ss[j].summary.CurrentReleaseDate)
|
||||
})
|
||||
|
||||
fname := filepath.Join(w.dir, label, "changes.csv")
|
||||
f, err := os.Create(fname)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
out := csv.NewWriter(f)
|
||||
out := util.NewFullyQuotedCSWWriter(f)
|
||||
|
||||
record := make([]string, 2)
|
||||
|
||||
const (
|
||||
pathColumn = 0
|
||||
timeColumn = 1
|
||||
)
|
||||
|
||||
for i := range ss {
|
||||
s := &ss[i]
|
||||
record[0] =
|
||||
s.summary.CurrentReleaseDate.Format(time.RFC3339)
|
||||
record[1] =
|
||||
record[pathColumn] =
|
||||
strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" + s.filename
|
||||
record[timeColumn] =
|
||||
s.summary.CurrentReleaseDate.Format(time.RFC3339)
|
||||
if err := out.Write(record); err != nil {
|
||||
f.Close()
|
||||
return err
|
||||
|
|
@ -116,7 +146,17 @@ func (w *worker) writeCSV(label string, summaries []summary) error {
|
|||
|
||||
func (w *worker) writeIndex(label string, summaries []summary) error {
|
||||
|
||||
fname := filepath.Join(w.dir, label, "index.txt")
|
||||
fname := filepath.Join(w.dir, label, indexTXT)
|
||||
|
||||
// If we don't have any entries remove existing file.
|
||||
if len(summaries) == 0 {
|
||||
// Does it really exist?
|
||||
if err := os.RemoveAll(fname); err != nil {
|
||||
return fmt.Errorf("unable to remove %q: %w", fname, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
f, err := os.Create(fname)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -137,14 +177,64 @@ func (w *worker) writeIndex(label string, summaries []summary) error {
|
|||
return err2
|
||||
}
|
||||
|
||||
func (w *worker) writeROLIENoSummaries(label string) error {
|
||||
|
||||
labelFolder := strings.ToLower(label)
|
||||
|
||||
fname := "csaf-feed-tlp-" + labelFolder + ".json"
|
||||
|
||||
feedURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
feedURL = feedURL.JoinPath(labelFolder, fname)
|
||||
|
||||
links := []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: feedURL.String(),
|
||||
}}
|
||||
|
||||
if w.provider.serviceDocument(w.processor.cfg) {
|
||||
serviceURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
serviceURL = serviceURL.JoinPath("service.json")
|
||||
links = append(links, csaf.Link{
|
||||
Rel: "service",
|
||||
HRef: serviceURL.String(),
|
||||
})
|
||||
}
|
||||
|
||||
rolie := &csaf.ROLIEFeed{
|
||||
Feed: csaf.FeedData{
|
||||
ID: "csaf-feed-tlp-" + strings.ToLower(label),
|
||||
Title: "CSAF feed (TLP:" + strings.ToUpper(label) + ")",
|
||||
Link: links,
|
||||
Category: []csaf.ROLIECategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
Updated: csaf.TimeStamp(time.Now().UTC()),
|
||||
Entry: []*csaf.Entry{},
|
||||
},
|
||||
}
|
||||
|
||||
path := filepath.Join(w.dir, labelFolder, fname)
|
||||
return util.WriteToFile(path, rolie)
|
||||
}
|
||||
|
||||
func (w *worker) writeROLIE(label string, summaries []summary) error {
|
||||
|
||||
labelFolder := strings.ToLower(label)
|
||||
|
||||
fname := "csaf-feed-tlp-" + labelFolder + ".json"
|
||||
|
||||
feedURL := w.cfg.Domain + "/.well-known/csaf-aggregator/" +
|
||||
w.provider.Name + "/" + labelFolder + "/" + fname
|
||||
feedURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
feedURL = feedURL.JoinPath(labelFolder, fname)
|
||||
|
||||
entries := make([]*csaf.Entry, len(summaries))
|
||||
|
||||
|
|
@ -156,24 +246,29 @@ func (w *worker) writeROLIE(label string, summaries []summary) error {
|
|||
for i := range summaries {
|
||||
s := &summaries[i]
|
||||
|
||||
csafURL := w.cfg.Domain + "/.well-known/csaf-aggregator/" +
|
||||
w.provider.Name + "/" + label + "/" +
|
||||
strconv.Itoa(s.summary.InitialReleaseDate.Year()) + "/" +
|
||||
s.filename
|
||||
csafURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
csafURLString := csafURL.JoinPath(label,
|
||||
strconv.Itoa(s.summary.InitialReleaseDate.Year()),
|
||||
s.filename).String()
|
||||
|
||||
entries[i] = &csaf.Entry{
|
||||
ID: s.summary.ID,
|
||||
Titel: s.summary.Title,
|
||||
Published: csaf.TimeStamp(s.summary.InitialReleaseDate),
|
||||
Updated: csaf.TimeStamp(s.summary.CurrentReleaseDate),
|
||||
Link: []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: csafURL,
|
||||
}},
|
||||
Link: []csaf.Link{
|
||||
{Rel: "self", HRef: csafURLString},
|
||||
{Rel: "hash", HRef: csafURLString + ".sha256"},
|
||||
{Rel: "hash", HRef: csafURLString + ".sha512"},
|
||||
{Rel: "signature", HRef: csafURLString + ".asc"},
|
||||
},
|
||||
Format: format,
|
||||
Content: csaf.Content{
|
||||
Type: "application/json",
|
||||
Src: csafURL,
|
||||
Src: csafURLString,
|
||||
},
|
||||
}
|
||||
if s.summary.Summary != "" {
|
||||
|
|
@ -183,19 +278,33 @@ func (w *worker) writeROLIE(label string, summaries []summary) error {
|
|||
}
|
||||
}
|
||||
|
||||
links := []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: feedURL.String(),
|
||||
}}
|
||||
|
||||
if w.provider.serviceDocument(w.processor.cfg) {
|
||||
serviceURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
serviceURL = serviceURL.JoinPath("service.json")
|
||||
links = append(links, csaf.Link{
|
||||
Rel: "service",
|
||||
HRef: serviceURL.String(),
|
||||
})
|
||||
}
|
||||
|
||||
rolie := &csaf.ROLIEFeed{
|
||||
Feed: csaf.FeedData{
|
||||
ID: "csaf-feed-tlp-" + strings.ToLower(label),
|
||||
Title: "CSAF feed (TLP:" + strings.ToUpper(label) + ")",
|
||||
Link: []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: feedURL,
|
||||
}},
|
||||
Link: links,
|
||||
Category: []csaf.ROLIECategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
Updated: csaf.TimeStamp(time.Now()),
|
||||
Updated: csaf.TimeStamp(time.Now().UTC()),
|
||||
Entry: entries,
|
||||
},
|
||||
}
|
||||
|
|
@ -207,50 +316,106 @@ func (w *worker) writeROLIE(label string, summaries []summary) error {
|
|||
return util.WriteToFile(path, rolie)
|
||||
}
|
||||
|
||||
func (w *worker) writeCategories(label string) error {
|
||||
categories := w.categories[label]
|
||||
if len(categories) == 0 {
|
||||
return nil
|
||||
}
|
||||
cats := make([]string, len(categories))
|
||||
var i int
|
||||
for cat := range categories {
|
||||
cats[i] = cat
|
||||
i++
|
||||
}
|
||||
rcd := csaf.NewROLIECategoryDocument(cats...)
|
||||
|
||||
labelFolder := strings.ToLower(label)
|
||||
fname := "category-" + labelFolder + ".json"
|
||||
path := filepath.Join(w.dir, labelFolder, fname)
|
||||
return util.WriteToFile(path, rcd)
|
||||
}
|
||||
|
||||
// writeService writes a service.json document if it is configured.
|
||||
func (w *worker) writeService() error {
|
||||
|
||||
if !w.provider.serviceDocument(w.processor.cfg) {
|
||||
return nil
|
||||
}
|
||||
labels := make([]string, len(w.summaries))
|
||||
var i int
|
||||
for label := range w.summaries {
|
||||
labels[i] = strings.ToLower(label)
|
||||
i++
|
||||
}
|
||||
sort.Strings(labels)
|
||||
|
||||
categories := csaf.ROLIEServiceWorkspaceCollectionCategories{
|
||||
Category: []csaf.ROLIEServiceWorkspaceCollectionCategoriesCategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
}
|
||||
|
||||
var collections []csaf.ROLIEServiceWorkspaceCollection
|
||||
|
||||
for _, ts := range labels {
|
||||
feedName := "csaf-feed-tlp-" + ts + ".json"
|
||||
|
||||
hrefURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
hrefURL = hrefURL.JoinPath(ts, feedName)
|
||||
|
||||
collection := csaf.ROLIEServiceWorkspaceCollection{
|
||||
Title: "CSAF feed (TLP:" + strings.ToUpper(ts) + ")",
|
||||
HRef: hrefURL.String(),
|
||||
Categories: categories,
|
||||
}
|
||||
collections = append(collections, collection)
|
||||
}
|
||||
|
||||
rsd := &csaf.ROLIEServiceDocument{
|
||||
Service: csaf.ROLIEService{
|
||||
Workspace: []csaf.ROLIEServiceWorkspace{{
|
||||
Title: "CSAF feeds",
|
||||
Collection: collections,
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
path := filepath.Join(w.dir, "service.json")
|
||||
return util.WriteToFile(path, rsd)
|
||||
}
|
||||
|
||||
func (w *worker) writeIndices() error {
|
||||
|
||||
if len(w.summaries) == 0 || w.dir == "" {
|
||||
w.writeROLIENoSummaries("undefined")
|
||||
return nil
|
||||
}
|
||||
|
||||
for label, summaries := range w.summaries {
|
||||
log.Printf("%s: %d\n", label, len(summaries))
|
||||
w.log.Debug("Writing indices", "label", label, "summaries.num", len(summaries))
|
||||
if err := w.writeInterims(label, summaries); err != nil {
|
||||
return err
|
||||
}
|
||||
// Only write index.txt and changes.csv if configured.
|
||||
if w.provider.writeIndices(w.processor.cfg) {
|
||||
if err := w.writeCSV(label, summaries); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.writeIndex(label, summaries); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := w.writeROLIE(label, summaries); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.writeCategories(label); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadIndex loads baseURL/index.txt and returns a list of files
|
||||
// prefixed by baseURL/.
|
||||
func (w *worker) loadIndex(baseURL string) ([]string, error) {
|
||||
indexURL := baseURL + "/index.txt"
|
||||
resp, err := w.client.Get(indexURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var lines []string
|
||||
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
|
||||
for scanner.Scan() {
|
||||
lines = append(lines, baseURL+"/"+scanner.Text())
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return lines, nil
|
||||
return w.writeService()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -13,11 +13,9 @@ import (
|
|||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
|
@ -25,8 +23,9 @@ import (
|
|||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
type interimJob struct {
|
||||
|
|
@ -34,28 +33,35 @@ type interimJob struct {
|
|||
err error
|
||||
}
|
||||
|
||||
// statusExpr is used as an expression to check the new status
|
||||
// of an advisory which was interim before.
|
||||
const statusExpr = `$.document.tracking.status`
|
||||
|
||||
// checkInterims checks the current status of the given
|
||||
// interim advisories. It returns a slice of advisories
|
||||
// which are not finished, yet.
|
||||
func (w *worker) checkInterims(
|
||||
tx *lazyTransaction,
|
||||
label string,
|
||||
interims [][2]string,
|
||||
) ([]string, error) {
|
||||
interims []interimsEntry,
|
||||
) ([]interimsEntry, error) {
|
||||
|
||||
var data bytes.Buffer
|
||||
|
||||
labelPath := filepath.Join(tx.Src(), label)
|
||||
|
||||
// advisories which are not interim any longer.
|
||||
var finalized []string
|
||||
var notFinalized []interimsEntry
|
||||
|
||||
for _, interim := range interims {
|
||||
|
||||
local := filepath.Join(labelPath, interim[0])
|
||||
url := interim[1]
|
||||
local := filepath.Join(labelPath, interim.path())
|
||||
url := interim.url()
|
||||
|
||||
// Load local SHA256 of the advisory
|
||||
localHash, err := util.HashFromFile(local + ".sha256")
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res, err := w.client.Get(url)
|
||||
|
|
@ -71,11 +77,11 @@ func (w *worker) checkInterims(
|
|||
data.Reset()
|
||||
hasher := io.MultiWriter(s256, &data)
|
||||
|
||||
var doc interface{}
|
||||
var doc any
|
||||
if err := func() error {
|
||||
defer res.Body.Close()
|
||||
tee := io.TeeReader(res.Body, hasher)
|
||||
return json.NewDecoder(tee).Decode(&doc)
|
||||
return misc.StrictJSONParse(tee, &doc)
|
||||
}(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -84,6 +90,7 @@ func (w *worker) checkInterims(
|
|||
|
||||
// If the hashes are equal then we can ignore this advisory.
|
||||
if bytes.Equal(localHash, remoteHash) {
|
||||
notFinalized = append(notFinalized, interim)
|
||||
continue
|
||||
}
|
||||
|
||||
|
|
@ -94,19 +101,19 @@ func (w *worker) checkInterims(
|
|||
|
||||
// XXX: Should we return an error here?
|
||||
for _, e := range errors {
|
||||
log.Printf("validation error: %s: %v\n", url, e)
|
||||
w.log.Error("validation error", "url", url, "err", e)
|
||||
}
|
||||
|
||||
// We need to write the changed content.
|
||||
|
||||
// This will start the transcation if not already started.
|
||||
// This will start the transaction if not already started.
|
||||
dst, err := tx.Dst()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Overwrite in the cloned folder.
|
||||
nlocal := filepath.Join(dst, label, interim[0])
|
||||
nlocal := filepath.Join(dst, label, interim.path())
|
||||
|
||||
bytes := data.Bytes()
|
||||
|
||||
|
|
@ -135,26 +142,36 @@ func (w *worker) checkInterims(
|
|||
if err := w.downloadSignatureOrSign(sigURL, ascFile, bytes); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check if we can remove this advisory as it is not interim any more.
|
||||
var status string
|
||||
if err := w.expr.Extract(statusExpr, util.StringMatcher(&status), true, doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if status == "interim" {
|
||||
notFinalized = append(notFinalized, interim)
|
||||
}
|
||||
}
|
||||
|
||||
return finalized, nil
|
||||
return notFinalized, nil
|
||||
}
|
||||
|
||||
// setupProviderInterim prepares the worker for a specific provider.
|
||||
func (w *worker) setupProviderInterim(provider *provider) {
|
||||
log.Printf("worker #%d: %s (%s)\n",
|
||||
w.num, provider.Name, provider.Domain)
|
||||
w.log.Info("Setting up worker", provider.Name, provider.Domain)
|
||||
|
||||
w.dir = ""
|
||||
w.provider = provider
|
||||
|
||||
// Each job needs a separate client.
|
||||
w.client = w.cfg.httpClient(provider)
|
||||
w.client = w.processor.cfg.httpClient(provider)
|
||||
}
|
||||
|
||||
func (w *worker) interimWork(wg *sync.WaitGroup, jobs <-chan *interimJob) {
|
||||
defer wg.Done()
|
||||
path := filepath.Join(w.cfg.Web, ".well-known", "csaf-aggregator")
|
||||
path := filepath.Join(w.processor.cfg.Web, ".well-known", "csaf-aggregator")
|
||||
|
||||
tooOld := w.processor.cfg.tooOldForInterims()
|
||||
|
||||
for j := range jobs {
|
||||
w.setupProviderInterim(j.provider)
|
||||
|
|
@ -162,7 +179,7 @@ func (w *worker) interimWork(wg *sync.WaitGroup, jobs <-chan *interimJob) {
|
|||
providerPath := filepath.Join(path, j.provider.Name)
|
||||
|
||||
j.err = func() error {
|
||||
tx := newLazyTransaction(providerPath, w.cfg.Folder)
|
||||
tx := newLazyTransaction(providerPath, w.processor.cfg.Folder)
|
||||
defer tx.rollback()
|
||||
|
||||
// Try all the labels
|
||||
|
|
@ -176,9 +193,8 @@ func (w *worker) interimWork(wg *sync.WaitGroup, jobs <-chan *interimJob) {
|
|||
label = strings.ToLower(label)
|
||||
labelPath := filepath.Join(providerPath, label)
|
||||
|
||||
interimsCSV := filepath.Join(labelPath, "interims.csv")
|
||||
interims, err := readInterims(
|
||||
interimsCSV, w.cfg.InterimYears)
|
||||
interCSV := filepath.Join(labelPath, interimsCSV)
|
||||
interims, olds, err := readInterims(interCSV, tooOld)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -189,23 +205,30 @@ func (w *worker) interimWork(wg *sync.WaitGroup, jobs <-chan *interimJob) {
|
|||
}
|
||||
|
||||
// Compare locals against remotes.
|
||||
finalized, err := w.checkInterims(tx, label, interims)
|
||||
notFinalized, err := w.checkInterims(tx, label, interims)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(finalized) > 0 {
|
||||
// Nothing has changed.
|
||||
if len(notFinalized) == len(interims) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Simply append the olds. Maybe we got re-configured with
|
||||
// a greater interims interval later.
|
||||
notFinalized = append(notFinalized, olds...)
|
||||
|
||||
// We want to write in the transaction folder.
|
||||
dst, err := tx.Dst()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
interimsCSV := filepath.Join(dst, label, "interims.csv")
|
||||
if err := writeInterims(interimsCSV, finalized); err != nil {
|
||||
ninterCSV := filepath.Join(dst, label, interimsCSV)
|
||||
if err := writeInterims(ninterCSV, notFinalized); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return tx.commit()
|
||||
}()
|
||||
}
|
||||
|
|
@ -231,16 +254,16 @@ func joinErrors(errs []error) error {
|
|||
func (p *processor) interim() error {
|
||||
|
||||
if !p.cfg.runAsMirror() {
|
||||
return errors.New("iterim in lister mode does not work")
|
||||
return errors.New("interim in lister mode does not work")
|
||||
}
|
||||
|
||||
queue := make(chan *interimJob)
|
||||
var wg sync.WaitGroup
|
||||
|
||||
log.Printf("Starting %d workers.\n", p.cfg.Workers)
|
||||
p.log.Info("Starting workers...", "num", p.cfg.Workers)
|
||||
for i := 1; i <= p.cfg.Workers; i++ {
|
||||
wg.Add(1)
|
||||
w := newWorker(i, p.cfg)
|
||||
w := newWorker(i, p)
|
||||
go w.interimWork(&wg, queue)
|
||||
}
|
||||
|
||||
|
|
@ -265,49 +288,17 @@ func (p *processor) interim() error {
|
|||
return joinErrors(errs)
|
||||
}
|
||||
|
||||
func writeInterims(interimsCSV string, finalized []string) error {
|
||||
type interimsEntry [3]string
|
||||
|
||||
// In case this is a longer list (unlikely).
|
||||
removed := make(map[string]bool, len(finalized))
|
||||
for _, f := range finalized {
|
||||
removed[f] = true
|
||||
}
|
||||
// func (ie interimsEntry) date() string { return ie[0] }
|
||||
func (ie interimsEntry) path() string { return ie[1] }
|
||||
func (ie interimsEntry) url() string { return ie[2] }
|
||||
|
||||
lines, err := func() ([][]string, error) {
|
||||
interimsF, err := os.Open(interimsCSV)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer interimsF.Close()
|
||||
c := csv.NewReader(interimsF)
|
||||
c.FieldsPerRecord = 3
|
||||
func writeInterims(interimsCSV string, interims []interimsEntry) error {
|
||||
|
||||
var lines [][]string
|
||||
for {
|
||||
record, err := c.Read()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// If not finalized it survives
|
||||
if !removed[record[1]] {
|
||||
lines = append(lines, record)
|
||||
}
|
||||
}
|
||||
return lines, nil
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// All interims are finalized now -> remove file.
|
||||
if len(lines) == 0 {
|
||||
if len(interims) == 0 {
|
||||
return os.RemoveAll(interimsCSV)
|
||||
}
|
||||
|
||||
// Overwrite old. It's save because we are in a transaction.
|
||||
|
||||
f, err := os.Create(interimsCSV)
|
||||
|
|
@ -316,8 +307,10 @@ func writeInterims(interimsCSV string, finalized []string) error {
|
|||
}
|
||||
c := csv.NewWriter(f)
|
||||
|
||||
if err := c.WriteAll(lines); err != nil {
|
||||
return f.Close()
|
||||
for _, ie := range interims {
|
||||
if err := c.Write(ie[:]); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
c.Flush()
|
||||
|
|
@ -330,51 +323,57 @@ func writeInterims(interimsCSV string, finalized []string) error {
|
|||
}
|
||||
|
||||
// readInterims scans a interims.csv file for matching
|
||||
// iterim advisories. Its sorted with youngest
|
||||
// interim advisories. Its sorted with youngest
|
||||
// first, so we can stop scanning if entries get too old.
|
||||
func readInterims(interimsCSV string, years int) ([][2]string, error) {
|
||||
|
||||
var tooOld func(time.Time) bool
|
||||
|
||||
if years <= 0 {
|
||||
tooOld = func(time.Time) bool { return false }
|
||||
} else {
|
||||
from := time.Now().AddDate(-years, 0, 0)
|
||||
tooOld = func(t time.Time) bool { return t.Before(from) }
|
||||
}
|
||||
// It returns two slices: The advisories that are young enough
|
||||
// and a slice of the advisories that are too old.
|
||||
func readInterims(
|
||||
interimsCSV string,
|
||||
tooOld func(time.Time) bool,
|
||||
) ([]interimsEntry, []interimsEntry, error) {
|
||||
|
||||
interimsF, err := os.Open(interimsCSV)
|
||||
if err != nil {
|
||||
// None existing file -> no interims.
|
||||
if os.IsNotExist(err) {
|
||||
return nil, nil
|
||||
return nil, nil, nil
|
||||
}
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
defer interimsF.Close()
|
||||
|
||||
c := csv.NewReader(interimsF)
|
||||
c.FieldsPerRecord = 3
|
||||
|
||||
var files [][2]string
|
||||
var files, olds []interimsEntry
|
||||
|
||||
youngEnough := true
|
||||
|
||||
for {
|
||||
record, err := c.Read()
|
||||
row, err := c.Read()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
t, err := time.Parse(time.RFC3339, record[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if tooOld(t) {
|
||||
break
|
||||
}
|
||||
files = append(files, [2]string{record[1], record[2]})
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return files, nil
|
||||
if youngEnough {
|
||||
t, err := time.Parse(time.RFC3339, row[0])
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if tooOld(t) {
|
||||
olds = []interimsEntry{{row[0], row[1], row[2]}}
|
||||
youngEnough = false
|
||||
} else {
|
||||
files = append(files, interimsEntry{row[0], row[1], row[2]})
|
||||
}
|
||||
} else {
|
||||
// These are too old.
|
||||
olds = append(olds, interimsEntry{row[0], row[1], row[2]})
|
||||
}
|
||||
}
|
||||
|
||||
return files, olds, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -9,10 +9,11 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
type lazyTransaction struct {
|
||||
|
|
@ -47,7 +48,7 @@ func (lt *lazyTransaction) Dst() (string, error) {
|
|||
}
|
||||
|
||||
// Copy old content into new.
|
||||
if err := util.DeepCopy(lt.dst, lt.src); err != nil {
|
||||
if err := util.DeepCopy(dst, lt.src); err != nil {
|
||||
os.RemoveAll(dst)
|
||||
return "", err
|
||||
}
|
||||
|
|
@ -71,16 +72,25 @@ func (lt *lazyTransaction) commit() error {
|
|||
}
|
||||
defer func() { lt.dst = "" }()
|
||||
|
||||
// Switch directories.
|
||||
symlink := filepath.Join(lt.dstDir, filepath.Base(lt.src))
|
||||
if err := os.Symlink(lt.dstDir, symlink); err != nil {
|
||||
os.RemoveAll(lt.dstDir)
|
||||
return err
|
||||
}
|
||||
if err := os.Rename(symlink, lt.src); err != nil {
|
||||
os.RemoveAll(lt.dstDir)
|
||||
// The expanded path of the original link.
|
||||
orig, err := filepath.EvalSymlinks(lt.src)
|
||||
if err != nil {
|
||||
os.RemoveAll(lt.dst)
|
||||
return err
|
||||
}
|
||||
|
||||
return os.RemoveAll(lt.src)
|
||||
// Switch directories.
|
||||
symlink := filepath.Join(lt.dst, filepath.Base(lt.src))
|
||||
if err := os.Symlink(lt.dst, symlink); err != nil {
|
||||
os.RemoveAll(lt.dst)
|
||||
return err
|
||||
}
|
||||
|
||||
slog.Debug("Moving directory", "from", symlink, "to", lt.src)
|
||||
if err := os.Rename(symlink, lt.src); err != nil {
|
||||
os.RemoveAll(lt.dst)
|
||||
return err
|
||||
}
|
||||
|
||||
return os.RemoveAll(orig)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -11,8 +11,8 @@ package main
|
|||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// mirrorAllowed checks if mirroring is allowed.
|
||||
|
|
|
|||
|
|
@ -1,43 +1,36 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
// Package main implements the csaf_aggregator tool.
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gofrs/flock"
|
||||
"github.com/jessevdk/go-flags"
|
||||
)
|
||||
|
||||
type options struct {
|
||||
Config string `short:"c" long:"config" description:"File name of the configuration file" value-name:"CFG-FILE" default:"aggregator.toml"`
|
||||
Version bool `long:"version" description:"Display version of the binary"`
|
||||
Interim bool `short:"i" long:"interim" description:"Perform an interim scan"`
|
||||
}
|
||||
|
||||
func errCheck(err error) {
|
||||
if err != nil {
|
||||
if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp {
|
||||
os.Exit(0)
|
||||
}
|
||||
log.Fatalf("error: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
func lock(lockFile *string, fn func() error) error {
|
||||
if lockFile == nil {
|
||||
// No locking configured.
|
||||
return fn()
|
||||
}
|
||||
|
||||
err := os.MkdirAll(filepath.Dir(*lockFile), 0700)
|
||||
if err != nil {
|
||||
return fmt.Errorf("file locking failed: %v", err)
|
||||
}
|
||||
|
||||
fl := flock.New(*lockFile)
|
||||
locked, err := fl.TryLock()
|
||||
if err != nil {
|
||||
|
|
@ -45,32 +38,17 @@ func lock(lockFile *string, fn func() error) error {
|
|||
}
|
||||
|
||||
if !locked {
|
||||
return fmt.Errorf("cannot lock to file %s", *lockFile)
|
||||
return fmt.Errorf("cannot acquire file lock at %s. Maybe the CSAF aggregator is already running?", *lockFile)
|
||||
}
|
||||
defer fl.Unlock()
|
||||
return fn()
|
||||
}
|
||||
|
||||
func main() {
|
||||
opts := new(options)
|
||||
|
||||
_, err := flags.Parse(opts)
|
||||
errCheck(err)
|
||||
|
||||
if opts.Version {
|
||||
fmt.Println(util.SemVersion)
|
||||
return
|
||||
}
|
||||
|
||||
interim := opts.Interim
|
||||
|
||||
cfg, err := loadConfig(opts.Config)
|
||||
errCheck(err)
|
||||
|
||||
if interim {
|
||||
cfg.Interim = true
|
||||
}
|
||||
|
||||
p := processor{cfg: cfg}
|
||||
errCheck(lock(cfg.LockFile, p.process))
|
||||
_, cfg, err := parseArgsConfig()
|
||||
cfg.prepareLogging()
|
||||
options.ErrorCheckStructured(err)
|
||||
options.ErrorCheckStructured(cfg.prepare())
|
||||
p := processor{cfg: cfg, log: slog.Default()}
|
||||
options.ErrorCheckStructured(lock(cfg.LockFile, p.process))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -13,10 +13,9 @@ import (
|
|||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
|
|
@ -26,77 +25,15 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/armor"
|
||||
"github.com/ProtonMail/gopenpgp/v2/constants"
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func (w *worker) handleROLIE(
|
||||
rolie interface{},
|
||||
process func(*csaf.TLPLabel, []string) error,
|
||||
) error {
|
||||
base, err := url.Parse(w.loc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var feeds [][]csaf.Feed
|
||||
if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Printf("Found %d ROLIE feed(s).\n", len(feeds))
|
||||
|
||||
for _, fs := range feeds {
|
||||
for i := range fs {
|
||||
feed := &fs[i]
|
||||
if feed.URL == nil {
|
||||
continue
|
||||
}
|
||||
up, err := url.Parse(string(*feed.URL))
|
||||
if err != nil {
|
||||
log.Printf("Invalid URL %s in feed: %v.", *feed.URL, err)
|
||||
continue
|
||||
}
|
||||
feedURL := base.ResolveReference(up).String()
|
||||
log.Printf("Feed URL: %s\n", feedURL)
|
||||
|
||||
fb, err := util.BaseURL(feedURL)
|
||||
if err != nil {
|
||||
log.Printf("error: Invalid feed base URL '%s': %v\n", fb, err)
|
||||
continue
|
||||
}
|
||||
feedBaseURL, err := url.Parse(fb)
|
||||
if err != nil {
|
||||
log.Printf("error: Cannot parse feed base URL '%s': %v\n", fb, err)
|
||||
continue
|
||||
}
|
||||
|
||||
res, err := w.client.Get(feedURL)
|
||||
if err != nil {
|
||||
log.Printf("error: Cannot get feed '%s'\n", err)
|
||||
continue
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
log.Printf("error: Fetching %s failed. Status code %d (%s)",
|
||||
feedURL, res.StatusCode, res.Status)
|
||||
continue
|
||||
}
|
||||
rfeed, err := func() (*csaf.ROLIEFeed, error) {
|
||||
defer res.Body.Close()
|
||||
return csaf.LoadROLIEFeed(res.Body)
|
||||
}()
|
||||
if err != nil {
|
||||
log.Printf("Loading ROLIE feed failed: %v.", err)
|
||||
continue
|
||||
}
|
||||
files := resolveURLs(rfeed.Files(), feedBaseURL)
|
||||
if err := process(feed.TLPLabel, files); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// mirrorAllowed checks if mirroring is allowed.
|
||||
func (w *worker) mirrorAllowed() bool {
|
||||
var b bool
|
||||
|
|
@ -110,7 +47,7 @@ func (w *worker) mirror() (*csaf.AggregatorCSAFProvider, error) {
|
|||
if err != nil && w.dir != "" {
|
||||
// If something goes wrong remove the debris.
|
||||
if err := os.RemoveAll(w.dir); err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
w.log.Error("Could not remove directory", "path", w.dir, "err", err)
|
||||
}
|
||||
}
|
||||
return result, err
|
||||
|
|
@ -127,38 +64,25 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {
|
|||
// Collecting the summaries of the advisories.
|
||||
w.summaries = make(map[string][]summary)
|
||||
|
||||
// Check if we have ROLIE feeds.
|
||||
rolie, err := w.expr.Eval(
|
||||
"$.distributions[*].rolie.feeds", w.metadataProvider)
|
||||
// Collecting the categories per label.
|
||||
w.categories = map[string]util.Set[string]{}
|
||||
|
||||
pmdURL, err := url.Parse(w.loc)
|
||||
if err != nil {
|
||||
log.Printf("rolie check failed: %v\n", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fs, hasRolie := rolie.([]interface{})
|
||||
hasRolie = hasRolie && len(fs) > 0
|
||||
afp := csaf.NewAdvisoryFileProcessor(
|
||||
w.client,
|
||||
w.expr,
|
||||
w.metadataProvider,
|
||||
pmdURL)
|
||||
|
||||
if hasRolie {
|
||||
if err := w.handleROLIE(rolie, w.mirrorFiles); err != nil {
|
||||
afp.AgeAccept = w.provider.ageAccept(w.processor.cfg)
|
||||
|
||||
if err := afp.Process(w.mirrorFiles); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
// No rolie feeds -> try to load files from index.txt
|
||||
baseURL, err := util.BaseURL(w.loc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files, err := w.loadIndex(baseURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_ = files
|
||||
// XXX: Is treating as white okay? better look into the advisories?
|
||||
white := csaf.TLPLabel(csaf.TLPLabelWhite)
|
||||
if err := w.mirrorFiles(&white, files); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} // TODO: else scan directories?
|
||||
|
||||
if err := w.writeIndices(); err != nil {
|
||||
return nil, err
|
||||
|
|
@ -179,9 +103,13 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {
|
|||
}
|
||||
|
||||
// Add us as a mirror.
|
||||
mirror, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
mirrorURL := csaf.ProviderURL(
|
||||
fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/provider-metadata.json",
|
||||
w.cfg.Domain, w.provider.Name))
|
||||
mirror.JoinPath("provider-metadata.json").String(),
|
||||
)
|
||||
|
||||
acp.Mirrors = []csaf.ProviderURL{
|
||||
mirrorURL,
|
||||
|
|
@ -193,7 +121,7 @@ func (w *worker) mirrorInternal() (*csaf.AggregatorCSAFProvider, error) {
|
|||
func (w *worker) labelsFromSummaries() []csaf.TLPLabel {
|
||||
labels := make([]csaf.TLPLabel, 0, len(w.summaries))
|
||||
for label := range w.summaries {
|
||||
labels = append(labels, csaf.TLPLabel(label))
|
||||
labels = append(labels, csaf.TLPLabel(strings.ToUpper(label)))
|
||||
}
|
||||
sort.Slice(labels, func(i, j int) bool { return labels[i] < labels[j] })
|
||||
return labels
|
||||
|
|
@ -204,10 +132,26 @@ func (w *worker) writeProviderMetadata() error {
|
|||
|
||||
fname := filepath.Join(w.dir, "provider-metadata.json")
|
||||
|
||||
prefixURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pm := csaf.NewProviderMetadataPrefix(
|
||||
w.cfg.Domain+"/.well-known/csaf-aggregator/"+w.provider.Name,
|
||||
prefixURL.String(),
|
||||
w.labelsFromSummaries())
|
||||
|
||||
// Fill in directory URLs if needed.
|
||||
if w.provider.writeIndices(w.processor.cfg) {
|
||||
labels := make([]string, 0, len(w.summaries))
|
||||
for label := range w.summaries {
|
||||
labels = append(labels, label)
|
||||
}
|
||||
sort.Strings(labels)
|
||||
for _, label := range labels {
|
||||
pm.AddDirectoryDistribution(prefixURL.JoinPath(label).String())
|
||||
}
|
||||
}
|
||||
|
||||
// Figure out the role
|
||||
var role csaf.MetadataRole
|
||||
|
||||
|
|
@ -229,7 +173,7 @@ func (w *worker) writeProviderMetadata() error {
|
|||
{Expr: `$.public_openpgp_keys`, Action: util.ReMarshalMatcher(&pm.PGPKeys)},
|
||||
}, w.metadataProvider); err != nil {
|
||||
// only log the errors
|
||||
log.Printf("extracting data from orignal provider failed: %v\n", err)
|
||||
w.log.Error("Extracting data from original provider failed", "err", err)
|
||||
}
|
||||
|
||||
// We are mirroring the remote public keys, too.
|
||||
|
|
@ -251,19 +195,22 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error {
|
|||
return err
|
||||
}
|
||||
|
||||
keyURL, err := w.getProviderBaseURL()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
localKeyURL := func(fingerprint string) string {
|
||||
return fmt.Sprintf("%s/.well-known/csaf-aggregator/%s/openpgp/%s.asc",
|
||||
w.cfg.Domain, w.provider.Name, fingerprint)
|
||||
return keyURL.JoinPath("openpgp", (fingerprint + ".asc")).String()
|
||||
}
|
||||
|
||||
for i := range pm.PGPKeys {
|
||||
pgpKey := &pm.PGPKeys[i]
|
||||
if pgpKey.URL == nil {
|
||||
log.Printf("ignoring PGP key without URL: %s\n", pgpKey.Fingerprint)
|
||||
w.log.Warn("Ignoring PGP key without URL", "fingerprint", pgpKey.Fingerprint)
|
||||
continue
|
||||
}
|
||||
if _, err := hex.DecodeString(string(pgpKey.Fingerprint)); err != nil {
|
||||
log.Printf("ignoring PGP with invalid fingerprint: %s\n", *pgpKey.URL)
|
||||
w.log.Warn("Ignoring PGP key with invalid fingerprint", "url", *pgpKey.URL)
|
||||
continue
|
||||
}
|
||||
|
||||
|
|
@ -303,18 +250,18 @@ func (w *worker) mirrorPGPKeys(pm *csaf.ProviderMetadata) error {
|
|||
}
|
||||
|
||||
// replace the URL
|
||||
url := localKeyURL(fingerprint)
|
||||
pgpKey.URL = &url
|
||||
u := localKeyURL(fingerprint)
|
||||
pgpKey.URL = &u
|
||||
}
|
||||
|
||||
// If we have public key configured copy it into the new folder
|
||||
|
||||
if w.cfg.OpenPGPPublicKey == "" {
|
||||
if w.processor.cfg.OpenPGPPublicKey == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Load the key for the fingerprint.
|
||||
data, err := os.ReadFile(w.cfg.OpenPGPPublicKey)
|
||||
data, err := os.ReadFile(w.processor.cfg.OpenPGPPublicKey)
|
||||
if err != nil {
|
||||
os.RemoveAll(openPGPFolder)
|
||||
return err
|
||||
|
|
@ -371,7 +318,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error
|
|||
var (
|
||||
lastUpdated = csaf.TimeStamp(lastUpdatedT)
|
||||
role = csaf.MetadataRole(roleS)
|
||||
url = csaf.ProviderURL(urlS)
|
||||
providerURL = csaf.ProviderURL(urlS)
|
||||
)
|
||||
|
||||
return &csaf.AggregatorCSAFProvider{
|
||||
|
|
@ -379,7 +326,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error
|
|||
LastUpdated: &lastUpdated,
|
||||
Publisher: &pub,
|
||||
Role: &role,
|
||||
URL: &url,
|
||||
URL: &providerURL,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
|
@ -388,7 +335,7 @@ func (w *worker) createAggregatorProvider() (*csaf.AggregatorCSAFProvider, error
|
|||
func (w *worker) doMirrorTransaction() error {
|
||||
|
||||
webTarget := filepath.Join(
|
||||
w.cfg.Web, ".well-known", "csaf-aggregator", w.provider.Name)
|
||||
w.processor.cfg.Web, ".well-known", "csaf-aggregator", w.provider.Name)
|
||||
|
||||
var oldWeb string
|
||||
|
||||
|
|
@ -406,8 +353,8 @@ func (w *worker) doMirrorTransaction() error {
|
|||
}
|
||||
|
||||
// Check if there is a sysmlink already.
|
||||
target := filepath.Join(w.cfg.Folder, w.provider.Name)
|
||||
log.Printf("target: '%s'\n", target)
|
||||
target := filepath.Join(w.processor.cfg.Folder, w.provider.Name)
|
||||
w.log.Debug("Checking for path existance", "path", target)
|
||||
|
||||
exists, err := util.PathExists(target)
|
||||
if err != nil {
|
||||
|
|
@ -422,7 +369,7 @@ func (w *worker) doMirrorTransaction() error {
|
|||
}
|
||||
}
|
||||
|
||||
log.Printf("sym link: %s -> %s\n", w.dir, target)
|
||||
w.log.Debug("Creating sym link", "from", w.dir, "to", target)
|
||||
|
||||
// Create a new symlink
|
||||
if err := os.Symlink(w.dir, target); err != nil {
|
||||
|
|
@ -431,7 +378,7 @@ func (w *worker) doMirrorTransaction() error {
|
|||
}
|
||||
|
||||
// Move the symlink
|
||||
log.Printf("Move: %s -> %s\n", target, webTarget)
|
||||
w.log.Debug("Moving sym link", "from", target, "to", webTarget)
|
||||
if err := os.Rename(target, webTarget); err != nil {
|
||||
os.RemoveAll(w.dir)
|
||||
return err
|
||||
|
|
@ -470,14 +417,14 @@ func (w *worker) downloadSignature(path string) (string, error) {
|
|||
// sign signs the given data with the configured key.
|
||||
func (w *worker) sign(data []byte) (string, error) {
|
||||
if w.signRing == nil {
|
||||
key, err := w.cfg.privateOpenPGPKey()
|
||||
key, err := w.processor.cfg.privateOpenPGPKey()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if key == nil {
|
||||
return "", nil
|
||||
}
|
||||
if pp := w.cfg.Passphrase; pp != nil {
|
||||
if pp := w.processor.cfg.Passphrase; pp != nil {
|
||||
if key, err = key.Unlock([]byte(*pp)); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
|
@ -490,15 +437,64 @@ func (w *worker) sign(data []byte) (string, error) {
|
|||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return sig.GetArmored()
|
||||
return armor.ArmorWithTypeAndCustomHeaders(
|
||||
sig.Data, constants.PGPSignatureHeader, "", "")
|
||||
}
|
||||
|
||||
func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
|
||||
label := "unknown"
|
||||
if tlpLabel != nil {
|
||||
label = strings.ToLower(string(*tlpLabel))
|
||||
func (w *worker) extractCategories(label string, advisory any) error {
|
||||
|
||||
// use provider or global categories
|
||||
var categories []string
|
||||
if w.provider.Categories != nil {
|
||||
categories = *w.provider.Categories
|
||||
} else if w.processor.cfg.Categories != nil {
|
||||
categories = *w.processor.cfg.Categories
|
||||
}
|
||||
|
||||
// Nothing to do.
|
||||
if len(categories) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
cats := w.categories[label]
|
||||
if cats == nil {
|
||||
cats = util.Set[string]{}
|
||||
w.categories[label] = cats
|
||||
}
|
||||
|
||||
const exprPrefix = "expr:"
|
||||
|
||||
var dynamic []string
|
||||
matcher := util.StringTreeMatcher(&dynamic)
|
||||
|
||||
for _, cat := range categories {
|
||||
if strings.HasPrefix(cat, exprPrefix) {
|
||||
expr := cat[len(exprPrefix):]
|
||||
// Compile first to check that the expression is okay.
|
||||
if _, err := w.expr.Compile(expr); err != nil {
|
||||
slog.Error("Compiling category expression failed",
|
||||
"expr", expr,
|
||||
"err", err)
|
||||
continue
|
||||
}
|
||||
// Ignore errors here as they result from not matching.
|
||||
w.expr.Extract(expr, matcher, true, advisory)
|
||||
} else { // Normal
|
||||
cats.Add(cat)
|
||||
}
|
||||
}
|
||||
|
||||
// Add dynamic categories.
|
||||
for _, cat := range dynamic {
|
||||
cats.Add(cat)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *worker) mirrorFiles(tlpLabel csaf.TLPLabel, files []csaf.AdvisoryFile) error {
|
||||
label := strings.ToLower(string(tlpLabel))
|
||||
|
||||
summaries := w.summaries[label]
|
||||
|
||||
dir, err := w.createDir()
|
||||
|
|
@ -511,20 +507,29 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
|
|||
yearDirs := make(map[int]string)
|
||||
|
||||
for _, file := range files {
|
||||
u, err := url.Parse(file)
|
||||
|
||||
u, err := url.Parse(file.URL())
|
||||
if err != nil {
|
||||
log.Printf("error: %s\n", err)
|
||||
w.log.Error("Could not parse advisory file URL", "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Ignore not confirming filenames.
|
||||
// Should we ignore this advisory?
|
||||
if w.provider.ignoreURL(file.URL(), w.processor.cfg) {
|
||||
if w.processor.cfg.Verbose {
|
||||
w.log.Info("Ignoring advisory", slog.Group("provider", "name", w.provider.Name), "file", file)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Ignore not conforming filenames.
|
||||
filename := filepath.Base(u.Path)
|
||||
if !util.ConfirmingFileName(filename) {
|
||||
log.Printf("Not confirming filename %q. Ignoring.\n", filename)
|
||||
if !util.ConformingFileName(filename) {
|
||||
w.log.Warn("Ignoring advisory because of non-conforming filename", "filename", filename)
|
||||
continue
|
||||
}
|
||||
|
||||
var advisory interface{}
|
||||
var advisory any
|
||||
|
||||
s256 := sha256.New()
|
||||
s512 := sha512.New()
|
||||
|
|
@ -533,34 +538,57 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
|
|||
|
||||
download := func(r io.Reader) error {
|
||||
tee := io.TeeReader(r, hasher)
|
||||
return json.NewDecoder(tee).Decode(&advisory)
|
||||
return misc.StrictJSONParse(tee, &advisory)
|
||||
}
|
||||
|
||||
if err := downloadJSON(w.client, file, download); err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
if err := downloadJSON(w.client, file.URL(), download); err != nil {
|
||||
w.log.Error("Error while downloading JSON", "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check against CSAF schema.
|
||||
errors, err := csaf.ValidateCSAF(advisory)
|
||||
if err != nil {
|
||||
log.Printf("error: %s: %v", file, err)
|
||||
w.log.Error("Error while validating CSAF schema", "err", err)
|
||||
continue
|
||||
}
|
||||
if len(errors) > 0 {
|
||||
log.Printf("CSAF file %s has %d validation errors.",
|
||||
file, len(errors))
|
||||
w.log.Error("CSAF file has validation errors", "num.errors", len(errors), "file", file)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check against remote validator.
|
||||
if rmv := w.processor.remoteValidator; rmv != nil {
|
||||
rvr, err := rmv.Validate(advisory)
|
||||
if err != nil {
|
||||
w.log.Error("Calling remote validator failed", "err", err)
|
||||
continue
|
||||
}
|
||||
if !rvr.Valid {
|
||||
w.log.Error("CSAF file does not validate remotely", "file", file.URL())
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
sum, err := csaf.NewAdvisorySummary(w.expr, advisory)
|
||||
if err != nil {
|
||||
log.Printf("error: %s: %v\n", file, err)
|
||||
w.log.Error("Error while creating new advisory", "file", file, "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if util.CleanFileName(sum.ID) != filename {
|
||||
w.log.Error("ID mismatch", "id", sum.ID, "filename", filename)
|
||||
}
|
||||
|
||||
if err := w.extractCategories(label, advisory); err != nil {
|
||||
w.log.Error("Could not extract categories", "file", file, "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
summaries = append(summaries, summary{
|
||||
filename: filename,
|
||||
summary: sum,
|
||||
url: file,
|
||||
url: file.URL(),
|
||||
})
|
||||
|
||||
year := sum.InitialReleaseDate.Year()
|
||||
|
|
@ -571,12 +599,10 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
|
|||
if err := os.MkdirAll(yearDir, 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
//log.Printf("created %s\n", yearDir)
|
||||
yearDirs[year] = yearDir
|
||||
}
|
||||
|
||||
fname := filepath.Join(yearDir, filename)
|
||||
//log.Printf("write: %s\n", fname)
|
||||
data := content.Bytes()
|
||||
if err := writeFileHashes(
|
||||
fname, filename,
|
||||
|
|
@ -586,7 +612,7 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
|
|||
}
|
||||
|
||||
// Try to fetch signature file.
|
||||
sigURL := file + ".asc"
|
||||
sigURL := file.SignURL()
|
||||
ascFile := fname + ".asc"
|
||||
if err := w.downloadSignatureOrSign(sigURL, ascFile, data); err != nil {
|
||||
return err
|
||||
|
|
@ -601,10 +627,9 @@ func (w *worker) mirrorFiles(tlpLabel *csaf.TLPLabel, files []string) error {
|
|||
// If this fails it creates a signature itself with the configured key.
|
||||
func (w *worker) downloadSignatureOrSign(url, fname string, data []byte) error {
|
||||
sig, err := w.downloadSignature(url)
|
||||
|
||||
if err != nil {
|
||||
if err != errNotFound {
|
||||
log.Printf("error: %s: %v\n", url, err)
|
||||
w.log.Error("Could not find signature URL", "url", url, "err", err)
|
||||
}
|
||||
// Sign it our self.
|
||||
if sig, err = w.sign(data); err != nil {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -10,17 +10,26 @@ package main
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"log/slog"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
)
|
||||
|
||||
type processor struct {
|
||||
// cfg is the global configuration.
|
||||
cfg *config
|
||||
|
||||
// remoteValidator is a globally configured remote validator.
|
||||
remoteValidator csaf.RemoteValidator
|
||||
|
||||
// log is the structured logger for the whole processor.
|
||||
log *slog.Logger
|
||||
}
|
||||
|
||||
type summary struct {
|
||||
|
|
@ -31,23 +40,27 @@ type summary struct {
|
|||
|
||||
type worker struct {
|
||||
num int
|
||||
processor *processor
|
||||
|
||||
expr *util.PathEval
|
||||
cfg *config
|
||||
signRing *crypto.KeyRing
|
||||
|
||||
client util.Client // client per provider
|
||||
provider *provider // current provider
|
||||
metadataProvider interface{} // current metadata provider
|
||||
metadataProvider any // current metadata provider
|
||||
loc string // URL of current provider-metadata.json
|
||||
dir string // Directory to store data to.
|
||||
summaries map[string][]summary // the summaries of the advisories.
|
||||
categories map[string]util.Set[string] // the categories per label.
|
||||
log *slog.Logger // the structured logger, supplied with the worker number.
|
||||
}
|
||||
|
||||
func newWorker(num int, config *config) *worker {
|
||||
func newWorker(num int, processor *processor) *worker {
|
||||
return &worker{
|
||||
num: num,
|
||||
cfg: config,
|
||||
processor: processor,
|
||||
expr: util.NewPathEval(),
|
||||
log: processor.log.With(slog.Int("worker", num)),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -64,7 +77,7 @@ func (w *worker) createDir() (string, error) {
|
|||
return w.dir, nil
|
||||
}
|
||||
dir, err := util.MakeUniqDir(
|
||||
filepath.Join(w.cfg.Folder, w.provider.Name))
|
||||
filepath.Join(w.processor.cfg.Folder, w.provider.Name))
|
||||
if err == nil {
|
||||
w.dir = dir
|
||||
}
|
||||
|
|
@ -73,14 +86,25 @@ func (w *worker) createDir() (string, error) {
|
|||
|
||||
func (w *worker) locateProviderMetadata(domain string) error {
|
||||
|
||||
lpmd := csaf.LoadProviderMetadataForDomain(
|
||||
w.client, domain, func(format string, args ...interface{}) {
|
||||
log.Printf(
|
||||
"Looking for provider-metadata.json of '"+domain+"': "+format+"\n", args...)
|
||||
})
|
||||
loader := csaf.NewProviderMetadataLoader(w.client)
|
||||
|
||||
if lpmd == nil {
|
||||
return fmt.Errorf("no provider-metadata.json found for '%s'", domain)
|
||||
lpmd := loader.Load(domain)
|
||||
|
||||
if !lpmd.Valid() {
|
||||
for i := range lpmd.Messages {
|
||||
w.log.Error(
|
||||
"Loading provider-metadata.json",
|
||||
"domain", domain,
|
||||
"message", lpmd.Messages[i].Message)
|
||||
}
|
||||
return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain)
|
||||
} else if w.processor.cfg.Verbose {
|
||||
for i := range lpmd.Messages {
|
||||
w.log.Debug(
|
||||
"Loading provider-metadata.json",
|
||||
"domain", domain,
|
||||
"message", lpmd.Messages[i].Message)
|
||||
}
|
||||
}
|
||||
|
||||
w.metadataProvider = lpmd.Document
|
||||
|
|
@ -89,12 +113,24 @@ func (w *worker) locateProviderMetadata(domain string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// getProviderBaseURL returns the base URL for the provider.
|
||||
func (w *worker) getProviderBaseURL() (*url.URL, error) {
|
||||
baseURL, err := url.Parse(w.processor.cfg.Domain)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
baseURL = baseURL.JoinPath(".well-known",
|
||||
"csaf-aggregator",
|
||||
w.provider.Name)
|
||||
return baseURL, nil
|
||||
}
|
||||
|
||||
// removeOrphans removes the directories that are not in the providers list.
|
||||
func (p *processor) removeOrphans() error {
|
||||
|
||||
keep := make(map[string]bool)
|
||||
keep := util.Set[string]{}
|
||||
for _, p := range p.cfg.Providers {
|
||||
keep[p.Name] = true
|
||||
keep.Add(p.Name)
|
||||
}
|
||||
|
||||
path := filepath.Join(p.cfg.Web, ".well-known", "csaf-aggregator")
|
||||
|
|
@ -122,13 +158,13 @@ func (p *processor) removeOrphans() error {
|
|||
}
|
||||
|
||||
for _, entry := range entries {
|
||||
if keep[entry.Name()] {
|
||||
if keep.Contains(entry.Name()) {
|
||||
continue
|
||||
}
|
||||
|
||||
fi, err := entry.Info()
|
||||
if err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
p.log.Error("Could not retrieve file info", "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
|
|
@ -140,13 +176,13 @@ func (p *processor) removeOrphans() error {
|
|||
d := filepath.Join(path, entry.Name())
|
||||
r, err := filepath.EvalSymlinks(d)
|
||||
if err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
p.log.Error("Could not evaluate symlink", "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
fd, err := os.Stat(r)
|
||||
if err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
p.log.Error("Could not retrieve file stats", "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
|
|
@ -156,18 +192,18 @@ func (p *processor) removeOrphans() error {
|
|||
}
|
||||
|
||||
// Remove the link.
|
||||
log.Printf("removing link %s -> %s\n", d, r)
|
||||
p.log.Info("Removing link", "path", fmt.Sprintf("%s -> %s", d, r))
|
||||
if err := os.Remove(d); err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
p.log.Error("Could not remove symlink", "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Only remove directories which are in our folder.
|
||||
if rel, err := filepath.Rel(prefix, r); err == nil &&
|
||||
rel == filepath.Base(r) {
|
||||
log.Printf("removing directory %s\n", r)
|
||||
p.log.Info("Remove directory", "path", r)
|
||||
if err := os.RemoveAll(r); err != nil {
|
||||
log.Printf("error: %v\n", err)
|
||||
p.log.Error("Could not remove directory", "err", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,28 +0,0 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// resolveURLs resolves a list of URLs urls against a base URL base.
|
||||
func resolveURLs(urls []string, base *url.URL) []string {
|
||||
out := make([]string, 0, len(urls))
|
||||
for _, u := range urls {
|
||||
p, err := url.Parse(u)
|
||||
if err != nil {
|
||||
log.Printf("error: Invalid URL '%s': %v\n", u, err)
|
||||
continue
|
||||
}
|
||||
out = append(out, base.ResolveReference(p).String())
|
||||
}
|
||||
return out
|
||||
}
|
||||
143
cmd/csaf_checker/config.go
Normal file
143
cmd/csaf_checker/config.go
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/certs"
|
||||
"github.com/gocsaf/csaf/v3/internal/filter"
|
||||
"github.com/gocsaf/csaf/v3/internal/models"
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
)
|
||||
|
||||
type outputFormat string
|
||||
|
||||
const (
|
||||
defaultPreset = "mandatory"
|
||||
defaultFormat = "json"
|
||||
)
|
||||
|
||||
type config struct {
|
||||
Output string `short:"o" long:"output" description:"File name of the generated report" value-name:"REPORT-FILE" toml:"output"`
|
||||
//lint:ignore SA5008 We are using choice twice: json, html.
|
||||
Format outputFormat `short:"f" long:"format" choice:"json" choice:"html" description:"Format of report" toml:"format"`
|
||||
Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"`
|
||||
ClientCert *string `long:"client_cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"`
|
||||
ClientKey *string `long:"client_key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE" toml:"client_key"`
|
||||
ClientPassphrase *string `long:"client_passphrase" description:"Optional passphrase for the client cert (limited, experimental, see downloader doc)" value-name:"PASSPHRASE" toml:"client_passphrase"`
|
||||
Version bool `long:"version" description:"Display version of the binary" toml:"-"`
|
||||
Verbose bool `long:"verbose" short:"v" description:"Verbose output" toml:"verbose"`
|
||||
Rate *float64 `long:"rate" short:"r" description:"The average upper limit of https operations per second (defaults to unlimited)" toml:"rate"`
|
||||
Range *models.TimeRange `long:"time_range" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"time_range"`
|
||||
IgnorePattern []string `long:"ignore_pattern" short:"i" description:"Do not download files if their URLs match any of the given PATTERNs" value-name:"PATTERN" toml:"ignore_pattern"`
|
||||
ExtraHeader http.Header `long:"header" short:"H" description:"One or more extra HTTP header fields" toml:"header"`
|
||||
RemoteValidator string `long:"validator" description:"URL to validate documents remotely" value-name:"URL" toml:"validator"`
|
||||
RemoteValidatorCache string `long:"validator_cache" description:"FILE to cache remote validations" value-name:"FILE" toml:"validator_cache"`
|
||||
RemoteValidatorPresets []string `long:"validator_preset" description:"One or more presets to validate remotely" toml:"validator_preset"`
|
||||
|
||||
Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"`
|
||||
|
||||
clientCerts []tls.Certificate
|
||||
ignorePattern filter.PatternMatcher
|
||||
}
|
||||
|
||||
// configPaths are the potential file locations of the config file.
|
||||
var configPaths = []string{
|
||||
"~/.config/csaf/checker.toml",
|
||||
"~/.csaf_checker.toml",
|
||||
"csaf_checker.toml",
|
||||
}
|
||||
|
||||
// UnmarshalText implements [encoding/text.TextUnmarshaler].
|
||||
func (of *outputFormat) UnmarshalText(text []byte) error {
|
||||
s := string(text)
|
||||
switch s {
|
||||
case "html", "json":
|
||||
*of = outputFormat(s)
|
||||
default:
|
||||
return fmt.Errorf(`%q is neither "html" nor "json"`, s)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseArgsConfig parse the command arguments and loads configuration
|
||||
// from a configuration file.
|
||||
func parseArgsConfig() ([]string, *config, error) {
|
||||
p := options.Parser[config]{
|
||||
DefaultConfigLocations: configPaths,
|
||||
ConfigLocation: func(cfg *config) string {
|
||||
return cfg.Config
|
||||
},
|
||||
Usage: "[OPTIONS] domain...",
|
||||
HasVersion: func(cfg *config) bool { return cfg.Version },
|
||||
SetDefaults: func(cfg *config) {
|
||||
cfg.Format = defaultFormat
|
||||
cfg.RemoteValidatorPresets = []string{defaultPreset}
|
||||
},
|
||||
// Re-establish default values if not set.
|
||||
EnsureDefaults: func(cfg *config) {
|
||||
if cfg.Format == "" {
|
||||
cfg.Format = defaultFormat
|
||||
}
|
||||
if cfg.RemoteValidatorPresets == nil {
|
||||
cfg.RemoteValidatorPresets = []string{defaultPreset}
|
||||
}
|
||||
},
|
||||
}
|
||||
return p.Parse()
|
||||
}
|
||||
|
||||
// protectedAccess returns true if we have client certificates or
|
||||
// extra http headers configured.
|
||||
// This may be a wrong assumption, because the certs are not checked
|
||||
// for their domain and custom headers may have other purposes.
|
||||
func (cfg *config) protectedAccess() bool {
|
||||
return len(cfg.clientCerts) > 0 || len(cfg.ExtraHeader) > 0
|
||||
}
|
||||
|
||||
// ignoreFile returns true if the given URL should not be downloaded.
|
||||
func (cfg *config) ignoreURL(u string) bool {
|
||||
return cfg.ignorePattern.Matches(u)
|
||||
}
|
||||
|
||||
// prepare prepares internal state of a loaded configuration.
|
||||
func (cfg *config) prepare() error {
|
||||
|
||||
// Pre-compile the regexes used to check if we need to ignore advisories.
|
||||
if err := cfg.compileIgnorePatterns(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Load client certs.
|
||||
return cfg.prepareCertificates()
|
||||
}
|
||||
|
||||
// compileIgnorePatterns compiles the configure patterns to be ignored.
|
||||
func (cfg *config) compileIgnorePatterns() error {
|
||||
pm, err := filter.NewPatternMatcher(cfg.IgnorePattern)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.ignorePattern = pm
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepareCertificates loads the client side certificates used by the HTTP client.
|
||||
func (cfg *config) prepareCertificates() error {
|
||||
cert, err := certs.LoadCertificate(
|
||||
cfg.ClientCert, cfg.ClientKey, cfg.ClientPassphrase)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.clientCerts = cert
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -13,20 +13,32 @@ import (
|
|||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
type (
|
||||
pageContent struct {
|
||||
err error
|
||||
links map[string]struct{}
|
||||
links util.Set[string]
|
||||
}
|
||||
pages map[string]*pageContent
|
||||
)
|
||||
|
||||
func (pgs pages) listed(path string, pro *processor) (bool, error) {
|
||||
base, err := util.BaseURL(path)
|
||||
func (pgs pages) listed(
|
||||
path string,
|
||||
pro *processor,
|
||||
badDirs util.Set[string],
|
||||
) (bool, error) {
|
||||
pathURL, err := url.Parse(path)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
base, err := util.BaseURL(pathURL)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
|
@ -36,8 +48,7 @@ func (pgs pages) listed(path string, pro *processor) (bool, error) {
|
|||
if content.err != nil {
|
||||
return false, nil
|
||||
}
|
||||
_, ok := content.links[path]
|
||||
return ok, nil
|
||||
return content.links.Contains(path), nil
|
||||
}
|
||||
|
||||
baseURL, err := url.Parse(base)
|
||||
|
|
@ -45,6 +56,10 @@ func (pgs pages) listed(path string, pro *processor) (bool, error) {
|
|||
return false, err
|
||||
}
|
||||
|
||||
if badDirs.Contains(base) {
|
||||
return false, errContinue
|
||||
}
|
||||
|
||||
// load page
|
||||
client := pro.httpClient()
|
||||
pro.checkTLS(base)
|
||||
|
|
@ -54,16 +69,18 @@ func (pgs pages) listed(path string, pro *processor) (bool, error) {
|
|||
|
||||
if err != nil {
|
||||
pro.badDirListings.error("Fetching %s failed: %v", base, err)
|
||||
badDirs.Add(base)
|
||||
return false, errContinue
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
pro.badDirListings.error("Fetching %s failed. Status code %d (%s)",
|
||||
base, res.StatusCode, res.Status)
|
||||
badDirs.Add(base)
|
||||
return false, errContinue
|
||||
}
|
||||
|
||||
content = &pageContent{
|
||||
links: map[string]struct{}{},
|
||||
links: util.Set[string]{},
|
||||
}
|
||||
|
||||
pgs[base] = content
|
||||
|
|
@ -78,16 +95,20 @@ func (pgs pages) listed(path string, pro *processor) (bool, error) {
|
|||
return err
|
||||
}
|
||||
// Links may be relative
|
||||
abs := baseURL.ResolveReference(u).String()
|
||||
content.links[abs] = struct{}{}
|
||||
var abs string
|
||||
if u.IsAbs() {
|
||||
abs = u.String()
|
||||
} else {
|
||||
abs = misc.JoinURL(baseURL, u).String()
|
||||
}
|
||||
content.links.Add(abs)
|
||||
return nil
|
||||
})
|
||||
}(); err != nil {
|
||||
return false, errContinue
|
||||
}
|
||||
|
||||
_, ok := content.links[path]
|
||||
return ok, nil
|
||||
return content.links.Contains(path), nil
|
||||
}
|
||||
|
||||
func linksOnPage(r io.Reader, visit func(string) error) error {
|
||||
|
|
|
|||
|
|
@ -1,9 +1,21 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
const page0 = `<html>
|
||||
|
|
@ -23,7 +35,6 @@ const page0 = `<html>
|
|||
</html>`
|
||||
|
||||
func TestLinksOnPage(t *testing.T) {
|
||||
|
||||
var links []string
|
||||
|
||||
err := linksOnPage(
|
||||
|
|
@ -50,3 +61,78 @@ func TestLinksOnPage(t *testing.T) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Test_listed(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
badDirs util.Set[string]
|
||||
path string
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
name: "listed path",
|
||||
badDirs: util.Set[string]{},
|
||||
path: "/white/avendor-advisory-0004.json",
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "badDirs contains path",
|
||||
badDirs: util.Set[string]{"/white/": {}},
|
||||
path: "/white/avendor-advisory-0004.json",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "not found",
|
||||
badDirs: util.Set[string]{},
|
||||
path: "/not-found/resource.json",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
name: "badDirs does not contain path",
|
||||
badDirs: util.Set[string]{"/bad-dir/": {}},
|
||||
path: "/white/avendor-advisory-0004.json",
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
name: "unlisted path",
|
||||
badDirs: util.Set[string]{},
|
||||
path: "/white/avendor-advisory-0004-not-listed.json",
|
||||
want: false,
|
||||
},
|
||||
}
|
||||
|
||||
t.Parallel()
|
||||
for _, testToRun := range tests {
|
||||
test := testToRun
|
||||
t.Run(test.name, func(tt *testing.T) {
|
||||
tt.Parallel()
|
||||
serverURL := ""
|
||||
fs := http.FileServer(http.Dir("../../testdata/simple-directory-provider"))
|
||||
server := httptest.NewTLSServer(fs)
|
||||
defer server.Close()
|
||||
|
||||
serverURL = server.URL
|
||||
|
||||
hClient := server.Client()
|
||||
client := util.Client(hClient)
|
||||
|
||||
pgs := pages{}
|
||||
cfg := config{RemoteValidator: "", RemoteValidatorCache: ""}
|
||||
p, err := newProcessor(&cfg)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
p.client = client
|
||||
|
||||
badDirs := util.Set[string]{}
|
||||
for dir := range test.badDirs {
|
||||
badDirs.Add(serverURL + dir)
|
||||
}
|
||||
|
||||
got, _ := pgs.listed(serverURL+test.path, p, badDirs)
|
||||
if got != test.want {
|
||||
t.Errorf("%q: Expected %t but got %t.", test.name, test.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,161 +1,43 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
// Package main implements the csaf_checker tool.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
_ "embed" // Used for embedding.
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/jessevdk/go-flags"
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
)
|
||||
|
||||
//go:embed tmpl/report.html
|
||||
var reportHTML string
|
||||
|
||||
type options struct {
|
||||
Output string `short:"o" long:"output" description:"File name of the generated report" value-name:"REPORT-FILE"`
|
||||
Format string `short:"f" long:"format" choice:"json" choice:"html" description:"Format of report" default:"json"`
|
||||
Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider"`
|
||||
ClientCert *string `long:"client-cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE"`
|
||||
ClientKey *string `long:"client-key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE"`
|
||||
Version bool `long:"version" description:"Display version of the binary"`
|
||||
Verbose bool `long:"verbose" short:"v" description:"Verbose output"`
|
||||
Rate *float64 `long:"rate" short:"r" description:"The average upper limit of https operations per second"`
|
||||
}
|
||||
|
||||
func errCheck(err error) {
|
||||
// run uses a processor to check all the given domains or direct urls
|
||||
// and generates a report.
|
||||
func run(cfg *config, domains []string) (*Report, error) {
|
||||
p, err := newProcessor(cfg)
|
||||
if err != nil {
|
||||
if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp {
|
||||
os.Exit(0)
|
||||
}
|
||||
log.Fatalf("error: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
// writeJSON writes the JSON encoding of the given report to the given stream.
|
||||
// It returns nil, otherwise an error.
|
||||
func writeJSON(report *Report, w io.WriteCloser) error {
|
||||
enc := json.NewEncoder(w)
|
||||
enc.SetIndent("", " ")
|
||||
err := enc.Encode(report)
|
||||
if e := w.Close(); err != nil {
|
||||
err = e
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// writeHTML writes the given report to the given writer, it uses the template
|
||||
// in the "reportHTML" variable. It returns nil, otherwise an error.
|
||||
func writeHTML(report *Report, w io.WriteCloser) error {
|
||||
tmpl, err := template.New("Report HTML").Parse(reportHTML)
|
||||
if err != nil {
|
||||
w.Close()
|
||||
return err
|
||||
}
|
||||
buf := bufio.NewWriter(w)
|
||||
|
||||
if err := tmpl.Execute(buf, report); err != nil {
|
||||
w.Close()
|
||||
return err
|
||||
}
|
||||
|
||||
err = buf.Flush()
|
||||
if e := w.Close(); err == nil {
|
||||
err = e
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
type nopCloser struct{ io.Writer }
|
||||
|
||||
func (nc *nopCloser) Close() error { return nil }
|
||||
|
||||
// writeReport defines where to write the report according to the "output" flag option.
|
||||
// It calls also the "writeJSON" or "writeHTML" function according to the "format" flag option.
|
||||
func writeReport(report *Report, opts *options) error {
|
||||
|
||||
var w io.WriteCloser
|
||||
|
||||
if opts.Output == "" {
|
||||
w = &nopCloser{os.Stdout}
|
||||
} else {
|
||||
f, err := os.Create(opts.Output)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
w = f
|
||||
}
|
||||
|
||||
var writer func(*Report, io.WriteCloser) error
|
||||
|
||||
switch opts.Format {
|
||||
case "json":
|
||||
writer = writeJSON
|
||||
default:
|
||||
writer = writeHTML
|
||||
}
|
||||
|
||||
return writer(report, w)
|
||||
}
|
||||
|
||||
// buildReporters initializes each report by assigning a number and description to it.
|
||||
// It returns an array of the reporter interface type.
|
||||
func buildReporters() []reporter {
|
||||
return []reporter{
|
||||
&tlsReporter{baseReporter{num: 3, description: "TLS"}},
|
||||
&redirectsReporter{baseReporter{num: 6, description: "Redirects"}},
|
||||
&providerMetadataReport{baseReporter{num: 7, description: "provider-metadata.json"}},
|
||||
&securityReporter{baseReporter{num: 8, description: "security.txt"}},
|
||||
&wellknownMetadataReporter{baseReporter{num: 9, description: "/.well-known/csaf/provider-metadata.json"}},
|
||||
&dnsPathReporter{baseReporter{num: 10, description: "DNS path"}},
|
||||
&oneFolderPerYearReport{baseReporter{num: 11, description: "One folder per year"}},
|
||||
&indexReporter{baseReporter{num: 12, description: "index.txt"}},
|
||||
&changesReporter{baseReporter{num: 13, description: "changes.csv"}},
|
||||
&directoryListingsReporter{baseReporter{num: 14, description: "Directory listings"}},
|
||||
&integrityReporter{baseReporter{num: 18, description: "Integrity"}},
|
||||
&signaturesReporter{baseReporter{num: 19, description: "Signatures"}},
|
||||
&publicPGPKeyReporter{baseReporter{num: 20, description: "Public OpenPGP Key"}},
|
||||
return nil, err
|
||||
}
|
||||
defer p.close()
|
||||
return p.run(domains)
|
||||
}
|
||||
|
||||
func main() {
|
||||
opts := new(options)
|
||||
|
||||
domains, err := flags.Parse(opts)
|
||||
errCheck(err)
|
||||
|
||||
if opts.Version {
|
||||
fmt.Println(util.SemVersion)
|
||||
return
|
||||
}
|
||||
domains, cfg, err := parseArgsConfig()
|
||||
options.ErrorCheck(err)
|
||||
options.ErrorCheck(cfg.prepare())
|
||||
|
||||
if len(domains) == 0 {
|
||||
log.Println("No domains given.")
|
||||
log.Println("No domain or direct url given.")
|
||||
return
|
||||
}
|
||||
|
||||
if opts.ClientCert != nil && opts.ClientKey == nil || opts.ClientCert == nil && opts.ClientKey != nil {
|
||||
log.Println("Both client-key and client-cert options must be set for the authentication.")
|
||||
return
|
||||
}
|
||||
report, err := run(cfg, domains)
|
||||
options.ErrorCheck(err)
|
||||
|
||||
p := newProcessor(opts)
|
||||
|
||||
report, err := p.run(buildReporters(), domains)
|
||||
errCheck(err)
|
||||
|
||||
errCheck(writeReport(report, opts))
|
||||
options.ErrorCheck(report.write(cfg.Format, cfg.Output))
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
259
cmd/csaf_checker/processor_test.go
Normal file
259
cmd/csaf_checker/processor_test.go
Normal file
|
|
@ -0,0 +1,259 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"reflect"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
"text/template"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/testutil"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func getRequirementTestData(t *testing.T, params testutil.ProviderParams, directoryProvider bool) []Requirement {
|
||||
path := "../../testdata/processor-requirements/"
|
||||
if params.EnableSha256 {
|
||||
path += "sha256-"
|
||||
}
|
||||
if params.EnableSha512 {
|
||||
path += "sha512-"
|
||||
}
|
||||
if params.ForbidSha256 {
|
||||
path += "forbid-sha256-"
|
||||
}
|
||||
if params.ForbidSha512 {
|
||||
path += "forbid-sha512-"
|
||||
}
|
||||
if directoryProvider {
|
||||
path += "directory"
|
||||
} else {
|
||||
path += "rolie"
|
||||
}
|
||||
path += ".json"
|
||||
|
||||
content, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
tmplt, err := template.New("base").Parse(string(content))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
var output bytes.Buffer
|
||||
err = tmplt.Execute(&output, params)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
var requirement []Requirement
|
||||
err = json.Unmarshal(output.Bytes(), &requirement)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return requirement
|
||||
}
|
||||
|
||||
func TestContentTypeReport(t *testing.T) {
|
||||
serverURL := ""
|
||||
params := testutil.ProviderParams{
|
||||
URL: "",
|
||||
EnableSha256: true,
|
||||
EnableSha512: true,
|
||||
ForbidSha256: true,
|
||||
ForbidSha512: true,
|
||||
JSONContentType: "application/json; charset=utf-8",
|
||||
}
|
||||
server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, false))
|
||||
defer server.Close()
|
||||
|
||||
serverURL = server.URL
|
||||
params.URL = server.URL
|
||||
|
||||
hClient := server.Client()
|
||||
client := util.Client(hClient)
|
||||
|
||||
cfg := config{}
|
||||
err := cfg.prepare()
|
||||
if err != nil {
|
||||
t.Fatalf("SHA marking config failed: %v", err)
|
||||
}
|
||||
p, err := newProcessor(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("could not init downloader: %v", err)
|
||||
}
|
||||
p.client = client
|
||||
|
||||
report, err := p.run([]string{serverURL + "/provider-metadata.json"})
|
||||
if err != nil {
|
||||
t.Errorf("Content-Type-Report: Expected no error, got: %v", err)
|
||||
}
|
||||
|
||||
got := report.Domains[0].Requirements
|
||||
idx := slices.IndexFunc(got, func(e *Requirement) bool {
|
||||
return e.Num == 7
|
||||
})
|
||||
if idx == -1 {
|
||||
t.Error("Content-Type-Report: Could not find requirement")
|
||||
} else {
|
||||
message := got[idx].Messages[0]
|
||||
if message.Type != ErrorType || !strings.Contains(message.Text, "should be 'application/json'") {
|
||||
t.Errorf("Content-Type-Report: Content Type Error, got %v", message)
|
||||
}
|
||||
}
|
||||
|
||||
p.close()
|
||||
}
|
||||
|
||||
func TestShaMarking(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
directoryProvider bool
|
||||
enableSha256 bool
|
||||
enableSha512 bool
|
||||
forbidSha256 bool
|
||||
forbidSha512 bool
|
||||
}{
|
||||
{
|
||||
name: "deliver sha256 and sha512",
|
||||
directoryProvider: false,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
},
|
||||
{
|
||||
name: "enable sha256 and sha512, forbid fetching",
|
||||
directoryProvider: false,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
forbidSha256: true,
|
||||
forbidSha512: true,
|
||||
},
|
||||
{
|
||||
name: "enable sha256 and sha512, forbid sha256",
|
||||
directoryProvider: false,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
forbidSha256: true,
|
||||
forbidSha512: false,
|
||||
},
|
||||
{
|
||||
name: "enable sha256 and sha512, forbid sha512",
|
||||
directoryProvider: false,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
forbidSha256: false,
|
||||
forbidSha512: true,
|
||||
},
|
||||
{
|
||||
name: "only deliver sha256",
|
||||
directoryProvider: false,
|
||||
enableSha256: true,
|
||||
enableSha512: false,
|
||||
},
|
||||
{
|
||||
name: "only deliver sha512",
|
||||
directoryProvider: false,
|
||||
enableSha256: false,
|
||||
enableSha512: true,
|
||||
},
|
||||
{
|
||||
name: "deliver sha256 and sha512, directory provider",
|
||||
directoryProvider: true,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
},
|
||||
{
|
||||
name: "only deliver sha256, directory provider",
|
||||
directoryProvider: true,
|
||||
enableSha256: true,
|
||||
enableSha512: false,
|
||||
},
|
||||
{
|
||||
name: "only deliver sha512, directory provider",
|
||||
directoryProvider: true,
|
||||
enableSha256: false,
|
||||
enableSha512: true,
|
||||
},
|
||||
{
|
||||
name: "no hash",
|
||||
directoryProvider: false,
|
||||
enableSha256: false,
|
||||
enableSha512: false,
|
||||
},
|
||||
{
|
||||
name: "no hash, directory provider",
|
||||
directoryProvider: true,
|
||||
enableSha256: false,
|
||||
enableSha512: false,
|
||||
},
|
||||
}
|
||||
|
||||
t.Parallel()
|
||||
for _, testToRun := range tests {
|
||||
test := testToRun
|
||||
t.Run(test.name, func(tt *testing.T) {
|
||||
tt.Parallel()
|
||||
serverURL := ""
|
||||
params := testutil.ProviderParams{
|
||||
URL: "",
|
||||
EnableSha256: test.enableSha256,
|
||||
EnableSha512: test.enableSha512,
|
||||
ForbidSha256: test.forbidSha256,
|
||||
ForbidSha512: test.forbidSha512,
|
||||
}
|
||||
server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider))
|
||||
defer server.Close()
|
||||
|
||||
serverURL = server.URL
|
||||
params.URL = server.URL
|
||||
|
||||
hClient := server.Client()
|
||||
client := util.Client(hClient)
|
||||
|
||||
cfg := config{}
|
||||
err := cfg.prepare()
|
||||
if err != nil {
|
||||
t.Fatalf("SHA marking config failed: %v", err)
|
||||
}
|
||||
p, err := newProcessor(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("could not init downloader: %v", err)
|
||||
}
|
||||
p.client = client
|
||||
|
||||
report, err := p.run([]string{serverURL + "/provider-metadata.json"})
|
||||
if err != nil {
|
||||
t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err)
|
||||
}
|
||||
expected := getRequirementTestData(t,
|
||||
testutil.ProviderParams{
|
||||
URL: serverURL,
|
||||
EnableSha256: test.enableSha256,
|
||||
EnableSha512: test.enableSha512,
|
||||
ForbidSha256: test.forbidSha256,
|
||||
ForbidSha512: test.forbidSha512,
|
||||
},
|
||||
test.directoryProvider)
|
||||
for i, got := range report.Domains[0].Requirements {
|
||||
if !reflect.DeepEqual(expected[i], *got) {
|
||||
t.Errorf("SHA marking %v: Expected %v, got %v", test.name, expected[i], *got)
|
||||
}
|
||||
}
|
||||
|
||||
p.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -1,14 +1,26 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import "fmt"
|
||||
import (
|
||||
"bufio"
|
||||
_ "embed" // Used for embedding.
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/models"
|
||||
)
|
||||
|
||||
// MessageType is the kind of the message.
|
||||
type MessageType int
|
||||
|
|
@ -38,14 +50,26 @@ type Requirement struct {
|
|||
// Domain are the results of a domain.
|
||||
type Domain struct {
|
||||
Name string `json:"name"`
|
||||
Publisher *csaf.Publisher `json:"publisher,omitempty"`
|
||||
Role *csaf.MetadataRole `json:"role,omitempty"`
|
||||
Requirements []*Requirement `json:"requirements,omitempty"`
|
||||
Passed bool `json:"passed"`
|
||||
}
|
||||
|
||||
// ReportTime stores the time of the report.
|
||||
type ReportTime struct{ time.Time }
|
||||
|
||||
// Report is the overall report.
|
||||
type Report struct {
|
||||
Domains []*Domain `json:"domains,omitempty"`
|
||||
Version string `json:"version,omitempty"`
|
||||
Date string `json:"date,omitempty"`
|
||||
Date ReportTime `json:"date,omitempty"`
|
||||
TimeRange *models.TimeRange `json:"timerange,omitempty"`
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaller interface.
|
||||
func (rt ReportTime) MarshalText() ([]byte, error) {
|
||||
return []byte(rt.Format(time.RFC3339)), nil
|
||||
}
|
||||
|
||||
// HasErrors tells if this requirement has errors.
|
||||
|
|
@ -58,14 +82,14 @@ func (r *Requirement) HasErrors() bool {
|
|||
return false
|
||||
}
|
||||
|
||||
// Append appends messages to requirement.
|
||||
func (r *Requirement) Append(msgs []Message) {
|
||||
r.Messages = append(r.Messages, msgs...)
|
||||
}
|
||||
|
||||
// HasErrors tells if this domain has errors.
|
||||
func (d *Domain) HasErrors() bool {
|
||||
for _, r := range d.Requirements {
|
||||
if r.HasErrors() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return !d.Passed
|
||||
}
|
||||
|
||||
// String implements fmt.Stringer interface.
|
||||
|
|
@ -88,3 +112,72 @@ func (r *Requirement) message(typ MessageType, texts ...string) {
|
|||
r.Messages = append(r.Messages, Message{Type: typ, Text: text})
|
||||
}
|
||||
}
|
||||
|
||||
// writeJSON writes the JSON encoding of the given report to the given stream.
|
||||
// It returns nil, otherwise an error.
|
||||
func (r *Report) writeJSON(w io.WriteCloser) error {
|
||||
enc := json.NewEncoder(w)
|
||||
enc.SetIndent("", " ")
|
||||
err := enc.Encode(r)
|
||||
if e := w.Close(); err != nil {
|
||||
err = e
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
//go:embed tmpl/report.html
|
||||
var reportHTML string
|
||||
|
||||
// writeHTML writes the given report to the given writer, it uses the template
|
||||
// in the "reportHTML" variable. It returns nil, otherwise an error.
|
||||
func (r *Report) writeHTML(w io.WriteCloser) error {
|
||||
tmpl, err := template.New("Report HTML").Parse(reportHTML)
|
||||
if err != nil {
|
||||
w.Close()
|
||||
return err
|
||||
}
|
||||
buf := bufio.NewWriter(w)
|
||||
|
||||
if err := tmpl.Execute(buf, r); err != nil {
|
||||
w.Close()
|
||||
return err
|
||||
}
|
||||
|
||||
err = buf.Flush()
|
||||
if e := w.Close(); err == nil {
|
||||
err = e
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
type nopCloser struct{ io.Writer }
|
||||
|
||||
func (nc *nopCloser) Close() error { return nil }
|
||||
|
||||
// write defines where to write the report according to the "output" flag option.
|
||||
// It calls also the "writeJSON" or "writeHTML" function according to the "format" flag option.
|
||||
func (r *Report) write(format outputFormat, output string) error {
|
||||
|
||||
var w io.WriteCloser
|
||||
|
||||
if output == "" {
|
||||
w = &nopCloser{os.Stdout}
|
||||
} else {
|
||||
f, err := os.Create(output)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
w = f
|
||||
}
|
||||
|
||||
var writer func(*Report, io.WriteCloser) error
|
||||
|
||||
switch format {
|
||||
case "json":
|
||||
writer = (*Report).writeJSON
|
||||
default:
|
||||
writer = (*Report).writeHTML
|
||||
}
|
||||
|
||||
return writer(r, w)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -11,6 +11,9 @@ package main
|
|||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
type (
|
||||
|
|
@ -18,7 +21,11 @@ type (
|
|||
num int
|
||||
description string
|
||||
}
|
||||
validReporter struct{ baseReporter }
|
||||
filenameReporter struct{ baseReporter }
|
||||
tlsReporter struct{ baseReporter }
|
||||
tlpWhiteReporter struct{ baseReporter }
|
||||
tlpAmberRedReporter struct{ baseReporter }
|
||||
redirectsReporter struct{ baseReporter }
|
||||
providerMetadataReport struct{ baseReporter }
|
||||
securityReporter struct{ baseReporter }
|
||||
|
|
@ -28,11 +35,43 @@ type (
|
|||
indexReporter struct{ baseReporter }
|
||||
changesReporter struct{ baseReporter }
|
||||
directoryListingsReporter struct{ baseReporter }
|
||||
rolieFeedReporter struct{ baseReporter }
|
||||
rolieServiceReporter struct{ baseReporter }
|
||||
rolieCategoryReporter struct{ baseReporter }
|
||||
integrityReporter struct{ baseReporter }
|
||||
signaturesReporter struct{ baseReporter }
|
||||
publicPGPKeyReporter struct{ baseReporter }
|
||||
listReporter struct{ baseReporter }
|
||||
hasTwoReporter struct{ baseReporter }
|
||||
mirrorReporter struct{ baseReporter }
|
||||
)
|
||||
|
||||
var reporters = [...]reporter{
|
||||
1: &validReporter{baseReporter{num: 1, description: "Valid CSAF documents"}},
|
||||
2: &filenameReporter{baseReporter{num: 2, description: "Filename"}},
|
||||
3: &tlsReporter{baseReporter{num: 3, description: "TLS"}},
|
||||
4: &tlpWhiteReporter{baseReporter{num: 4, description: "TLP:WHITE"}},
|
||||
5: &tlpAmberRedReporter{baseReporter{num: 5, description: "TLP:AMBER and TLP:RED"}},
|
||||
6: &redirectsReporter{baseReporter{num: 6, description: "Redirects"}},
|
||||
7: &providerMetadataReport{baseReporter{num: 7, description: "provider-metadata.json"}},
|
||||
8: &securityReporter{baseReporter{num: 8, description: "security.txt"}},
|
||||
9: &wellknownMetadataReporter{baseReporter{num: 9, description: "/.well-known/csaf/provider-metadata.json"}},
|
||||
10: &dnsPathReporter{baseReporter{num: 10, description: "DNS path"}},
|
||||
11: &oneFolderPerYearReport{baseReporter{num: 11, description: "One folder per year"}},
|
||||
12: &indexReporter{baseReporter{num: 12, description: "index.txt"}},
|
||||
13: &changesReporter{baseReporter{num: 13, description: "changes.csv"}},
|
||||
14: &directoryListingsReporter{baseReporter{num: 14, description: "Directory listings"}},
|
||||
15: &rolieFeedReporter{baseReporter{num: 15, description: "ROLIE feed"}},
|
||||
16: &rolieServiceReporter{baseReporter{num: 16, description: "ROLIE service document"}},
|
||||
17: &rolieCategoryReporter{baseReporter{num: 17, description: "ROLIE category document"}},
|
||||
18: &integrityReporter{baseReporter{num: 18, description: "Integrity"}},
|
||||
19: &signaturesReporter{baseReporter{num: 19, description: "Signatures"}},
|
||||
20: &publicPGPKeyReporter{baseReporter{num: 20, description: "Public OpenPGP Key"}},
|
||||
21: &listReporter{baseReporter{num: 21, description: "List of CSAF providers"}},
|
||||
22: &hasTwoReporter{baseReporter{num: 22, description: "Two disjoint issuing parties"}},
|
||||
23: &mirrorReporter{baseReporter{num: 23, description: "Mirror"}},
|
||||
}
|
||||
|
||||
func (bc *baseReporter) requirement(domain *Domain) *Requirement {
|
||||
req := &Requirement{
|
||||
Num: bc.num,
|
||||
|
|
@ -42,6 +81,51 @@ func (bc *baseReporter) requirement(domain *Domain) *Requirement {
|
|||
return req
|
||||
}
|
||||
|
||||
// contains returns whether any of vs is present in s.
|
||||
func containsAny[E comparable](s []E, vs ...E) bool {
|
||||
for _, e := range s {
|
||||
for _, v := range vs {
|
||||
if e == v {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// report reports if there where any invalid filenames,
|
||||
func (r *validReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if p.validator == nil {
|
||||
req.message(WarnType, "No remote validator configured")
|
||||
}
|
||||
switch {
|
||||
case !p.invalidAdvisories.used():
|
||||
req.message(InfoType, "No validations performed")
|
||||
case len(p.invalidAdvisories) == 0:
|
||||
if p.validator != nil && containsAny(p.cfg.RemoteValidatorPresets,
|
||||
"basic", "mandatory", "extended", "full") {
|
||||
req.message(InfoType, "All advisories validated fine.")
|
||||
} else {
|
||||
req.message(InfoType, "All advisories validated fine against the schema.")
|
||||
}
|
||||
default:
|
||||
req.Append(p.invalidAdvisories)
|
||||
}
|
||||
}
|
||||
|
||||
// report reports if there where any bad filename.
|
||||
func (r *filenameReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badFilenames.used() {
|
||||
req.message(InfoType, "No filenames checked for conformance")
|
||||
} else if len(p.badFilenames) == 0 {
|
||||
req.message(InfoType, "All found filenames are conforming.")
|
||||
} else {
|
||||
req.Append(p.badFilenames)
|
||||
}
|
||||
}
|
||||
|
||||
// report tests if the URLs are HTTPS and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
// A list of non HTTPS URLs is included in the value of the "message" field.
|
||||
|
|
@ -67,6 +151,39 @@ func (r *tlsReporter) report(p *processor, domain *Domain) {
|
|||
req.message(ErrorType, urls...)
|
||||
}
|
||||
|
||||
// report tests if a document labeled TLP:WHITE
|
||||
// is freely accessible and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
func (r *tlpWhiteReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badWhitePermissions.used() {
|
||||
req.message(InfoType, "No access-protected advisories labeled TLP:WHITE found.")
|
||||
return
|
||||
}
|
||||
if len(p.badWhitePermissions) == 0 {
|
||||
req.message(InfoType, "All advisories labeled TLP:WHITE were freely accessible.")
|
||||
return
|
||||
}
|
||||
req.Messages = p.badWhitePermissions
|
||||
}
|
||||
|
||||
// report tests if a document labeled TLP:AMBER
|
||||
// or TLP:RED is access protected
|
||||
// and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
func (r *tlpAmberRedReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badAmberRedPermissions.used() {
|
||||
req.message(InfoType, "No advisories labeled TLP:AMBER or TLP:RED tested for accessibility.")
|
||||
return
|
||||
}
|
||||
if len(p.badAmberRedPermissions) == 0 {
|
||||
req.message(InfoType, "All tested advisories labeled TLP:AMBER or TLP:RED were access-protected.")
|
||||
return
|
||||
}
|
||||
req.Messages = p.badAmberRedPermissions
|
||||
}
|
||||
|
||||
// report tests if redirects are used and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
func (r *redirectsReporter) report(p *processor, domain *Domain) {
|
||||
|
|
@ -76,19 +193,41 @@ func (r *redirectsReporter) report(p *processor, domain *Domain) {
|
|||
return
|
||||
}
|
||||
|
||||
keys := make([]string, len(p.redirects))
|
||||
var i int
|
||||
for k := range p.redirects {
|
||||
keys[i] = k
|
||||
i++
|
||||
keys := keysNotInValues(p.redirects)
|
||||
|
||||
first := func(i int) string {
|
||||
if vs := p.redirects[keys[i]]; len(vs) > 0 {
|
||||
return vs[0]
|
||||
}
|
||||
sort.Strings(keys)
|
||||
return ""
|
||||
}
|
||||
|
||||
sort.Slice(keys, func(i, j int) bool { return first(i) < first(j) })
|
||||
|
||||
for i, k := range keys {
|
||||
keys[i] = fmt.Sprintf("Redirect %s: %s", k, p.redirects[k])
|
||||
keys[i] = fmt.Sprintf("Redirect %s -> %s", strings.Join(p.redirects[k], " -> "), k)
|
||||
}
|
||||
req.message(WarnType, keys...)
|
||||
}
|
||||
|
||||
// keysNotInValues returns a slice of keys which are not in the values
|
||||
// of the given map.
|
||||
func keysNotInValues(m map[string][]string) []string {
|
||||
values := util.Set[string]{}
|
||||
for _, vs := range m {
|
||||
for _, v := range vs {
|
||||
values.Add(v)
|
||||
}
|
||||
}
|
||||
keys := make([]string, 0, len(m))
|
||||
for k := range m {
|
||||
if !values.Contains(k) {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
// report tests if an provider-metadata.json are available and sets the
|
||||
// "message" field value of the "Requirement" struct as a result of that.
|
||||
func (r *providerMetadataReport) report(p *processor, domain *Domain) {
|
||||
|
|
@ -109,21 +248,17 @@ func (r *providerMetadataReport) report(p *processor, domain *Domain) {
|
|||
func (r *securityReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badSecurity.used() {
|
||||
req.message(InfoType, "No security.txt checked.")
|
||||
return
|
||||
}
|
||||
if len(p.badSecurity) == 0 {
|
||||
req.message(InfoType, "Found good security.txt.")
|
||||
req.message(WarnType, "Performed no in-depth test of security.txt.")
|
||||
return
|
||||
}
|
||||
req.Messages = p.badSecurity
|
||||
}
|
||||
|
||||
//report tests the availability of the "provider-metadata.json" under /.well-known/csaf/ directoy.
|
||||
// report tests the availability of the "provider-metadata.json" under /.well-known/csaf/ directoy.
|
||||
func (r *wellknownMetadataReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badWellknownMetadata.used() {
|
||||
req.message(InfoType, "No check if provider-metadata.json is under /.well-known/csaf/ was done.")
|
||||
req.message(WarnType, "Since no valid provider-metadata.json was found, no extended check was performed.")
|
||||
return
|
||||
}
|
||||
if len(p.badWellknownMetadata) == 0 {
|
||||
|
|
@ -133,11 +268,11 @@ func (r *wellknownMetadataReporter) report(p *processor, domain *Domain) {
|
|||
req.Messages = p.badWellknownMetadata
|
||||
}
|
||||
|
||||
// report tests if the "csaf.data.security.domain.tld" DNS record available and serves the "provider-metadata.json"
|
||||
// report outputs the result of the the explicit DNS test.
|
||||
func (r *dnsPathReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badDNSPath.used() {
|
||||
req.message(InfoType, "No download from https://csaf.data.security.DOMAIN attempted.")
|
||||
req.message(WarnType, "No check about contents from https://csaf.data.security.DOMAIN performed.")
|
||||
return
|
||||
}
|
||||
if len(p.badDNSPath) == 0 {
|
||||
|
|
@ -149,10 +284,6 @@ func (r *dnsPathReporter) report(p *processor, domain *Domain) {
|
|||
|
||||
func (r *oneFolderPerYearReport) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badFolders.used() {
|
||||
req.message(InfoType, "No checks if files are in right folders were performed.")
|
||||
return
|
||||
}
|
||||
if len(p.badFolders) == 0 {
|
||||
req.message(InfoType, "All CSAF files are in the right folders.")
|
||||
return
|
||||
|
|
@ -163,7 +294,7 @@ func (r *oneFolderPerYearReport) report(p *processor, domain *Domain) {
|
|||
func (r *indexReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badIndices.used() {
|
||||
req.message(InfoType, "No index.txt checked.")
|
||||
req.message(ErrorType, "No index.txt checked.")
|
||||
return
|
||||
}
|
||||
if len(p.badIndices) == 0 {
|
||||
|
|
@ -176,7 +307,7 @@ func (r *indexReporter) report(p *processor, domain *Domain) {
|
|||
func (r *changesReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badChanges.used() {
|
||||
req.message(InfoType, "No changes.csv checked.")
|
||||
req.message(ErrorType, "No changes.csv checked.")
|
||||
return
|
||||
}
|
||||
if len(p.badChanges) == 0 {
|
||||
|
|
@ -189,7 +320,7 @@ func (r *changesReporter) report(p *processor, domain *Domain) {
|
|||
func (r *directoryListingsReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badDirListings.used() {
|
||||
req.message(InfoType, "No directory listings checked.")
|
||||
req.message(WarnType, "No directory listings checked.")
|
||||
return
|
||||
}
|
||||
if len(p.badDirListings) == 0 {
|
||||
|
|
@ -199,6 +330,56 @@ func (r *directoryListingsReporter) report(p *processor, domain *Domain) {
|
|||
req.Messages = p.badDirListings
|
||||
}
|
||||
|
||||
// report checks whether there is only a single ROLIE feed for a
|
||||
// given TLP level and whether any of the TLP levels
|
||||
// TLP:WHITE, TLP:GREEN or unlabeled exists and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
func (r *rolieFeedReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if len(p.badROLIEFeed) == 0 {
|
||||
req.message(InfoType, "All checked ROLIE feeds validated fine.")
|
||||
return
|
||||
}
|
||||
req.Messages = p.badROLIEFeed
|
||||
}
|
||||
|
||||
// report tests whether a ROLIE service document is used and if so,
|
||||
// whether it is a [RFC8322] conform JSON file that lists the
|
||||
// ROLIE feed documents and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
func (r *rolieServiceReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badROLIEService.used() {
|
||||
req.message(WarnType, "ROLIE service document was not checked.")
|
||||
return
|
||||
}
|
||||
if len(p.badROLIEService) == 0 {
|
||||
req.message(InfoType, "ROLIE service document validated fine.")
|
||||
return
|
||||
}
|
||||
req.Messages = p.badROLIEService
|
||||
|
||||
}
|
||||
|
||||
// report tests whether a ROLIE category document is used and if so,
|
||||
// whether it is a [RFC8322] conform JSON file and is used to dissect
|
||||
// documents by certain criteria
|
||||
// and sets the "message" field value
|
||||
// of the "Requirement" struct as a result of that.
|
||||
func (r *rolieCategoryReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badROLIECategory.used() {
|
||||
req.message(WarnType, "No checks on the existence of ROLIE category documents performed.")
|
||||
return
|
||||
}
|
||||
if len(p.badROLIECategory) == 0 {
|
||||
req.message(InfoType, "All checked ROLIE category documents exist.")
|
||||
return
|
||||
}
|
||||
req.Messages = p.badROLIECategory
|
||||
|
||||
}
|
||||
|
||||
func (r *integrityReporter) report(p *processor, domain *Domain) {
|
||||
req := r.requirement(domain)
|
||||
if !p.badIntegrities.used() {
|
||||
|
|
@ -231,7 +412,30 @@ func (r *publicPGPKeyReporter) report(p *processor, domain *Domain) {
|
|||
return
|
||||
}
|
||||
req.Messages = p.badPGPs
|
||||
if len(p.keys) > 0 {
|
||||
req.message(InfoType, fmt.Sprintf("%d public OpenPGP key(s) loaded.", len(p.keys)))
|
||||
if p.keys != nil {
|
||||
req.message(InfoType, fmt.Sprintf("%d public OpenPGP key(s) loaded.",
|
||||
p.keys.CountEntities()))
|
||||
}
|
||||
}
|
||||
|
||||
// report tests whether a CSAF aggregator JSON schema conform
|
||||
// aggregator.json exists without being adjacent to a
|
||||
// provider-metadata.json
|
||||
func (r *listReporter) report(_ *processor, _ *Domain) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
// report tests whether the aggregator.json lists at least
|
||||
// two disjoint issuing parties. TODO: reevaluate phrasing (Req 7.1.22)
|
||||
func (r *hasTwoReporter) report(_ *processor, _ *Domain) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
// report tests whether the CSAF documents of each issuing mirrored party
|
||||
// is in a different folder, which are adjacent to the aggregator.json and
|
||||
// if the folder name is retrieved from the name of the issuing authority.
|
||||
// It also tests whether each folder has a provider-metadata.json for their
|
||||
// party and provides ROLIE feed documents.
|
||||
func (r *mirrorReporter) report(_ *processor, _ *Domain) {
|
||||
// TODO
|
||||
}
|
||||
|
|
|
|||
510
cmd/csaf_checker/roliecheck.go
Normal file
510
cmd/csaf_checker/roliecheck.go
Normal file
|
|
@ -0,0 +1,510 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// identifier consists of document/tracking/id and document/publisher/namespace,
// which in combination are unique for each CSAF document and therefore
// serve as the name of a CSAF document.
type identifier struct {
	id        string // document/tracking/id
	namespace string // document/publisher/namespace
}

// String implements fmt.Stringer. The format "(namespace, id)" is
// used in log and report messages.
func (id identifier) String() string {
	return "(" + id.namespace + ", " + id.id + ")"
}
|
||||
|
||||
// labelChecker helps to check if advisories are of the right TLP color.
type labelChecker struct {
	feedURL   string        // URL of the ROLIE feed currently being processed.
	feedLabel csaf.TLPLabel // TLP label of that feed; empty outside ROLIE feeds.

	// advisories maps each TLP label to the set of advisory URLs seen with it.
	advisories map[csaf.TLPLabel]util.Set[string]
	// whiteAdvisories records per TLP:WHITE advisory whether it was
	// reachable without authorization.
	whiteAdvisories map[identifier]bool
}

// reset brings the checker back to an initial state, clearing the
// per-feed fields and discarding all collected advisories.
func (lc *labelChecker) reset() {
	lc.feedLabel = ""
	lc.feedURL = ""
	lc.advisories = map[csaf.TLPLabel]util.Set[string]{}
	lc.whiteAdvisories = map[identifier]bool{}
}
|
||||
|
||||
// tlpLevel returns an inclusion order of TLP colors.
|
||||
func tlpLevel(label csaf.TLPLabel) int {
|
||||
switch label {
|
||||
case csaf.TLPLabelWhite:
|
||||
return 1
|
||||
case csaf.TLPLabelGreen:
|
||||
return 2
|
||||
case csaf.TLPLabelAmber:
|
||||
return 3
|
||||
case csaf.TLPLabelRed:
|
||||
return 4
|
||||
default:
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
// extractTLP extracts the tlp label of the given document
|
||||
// and defaults to UNLABELED if not found.
|
||||
func (p *processor) extractTLP(doc any) csaf.TLPLabel {
|
||||
labelString, err := p.expr.Eval(`$.document.distribution.tlp.label`, doc)
|
||||
if err != nil {
|
||||
return csaf.TLPLabelUnlabeled
|
||||
}
|
||||
label, ok := labelString.(string)
|
||||
if !ok {
|
||||
return csaf.TLPLabelUnlabeled
|
||||
}
|
||||
return csaf.TLPLabel(label)
|
||||
}
|
||||
|
||||
// check tests if the TLP label of an advisory is used correctly.
// It extracts the advisory's label, verifies access protection for
// that label, records the URL under the label and compares the
// label against the enclosing feed's TLP level.
func (lc *labelChecker) check(
	p *processor,
	doc any,
	url string,
) {
	label := p.extractTLP(doc)

	// Check the permissions.
	lc.checkPermissions(p, label, doc, url)

	// Associate advisory label to urls.
	lc.add(label, url)

	// If entry shows up in feed of higher tlp level, give out info or warning.
	lc.checkRank(p, label, url)
}
|
||||
|
||||
// checkPermissions checks for mistakes in access-protection.
|
||||
func (lc *labelChecker) checkPermissions(
|
||||
p *processor,
|
||||
label csaf.TLPLabel,
|
||||
doc any,
|
||||
url string,
|
||||
) {
|
||||
switch label {
|
||||
case csaf.TLPLabelAmber, csaf.TLPLabelRed:
|
||||
// If the client has no authorization it shouldn't be able
|
||||
// to access TLP:AMBER or TLP:RED advisories
|
||||
p.badAmberRedPermissions.use()
|
||||
if !p.usedAuthorizedClient() {
|
||||
p.badAmberRedPermissions.error(
|
||||
"Advisory %s of TLP level %v is not access protected.",
|
||||
url, label)
|
||||
} else {
|
||||
res, err := p.unauthorizedClient().Get(url)
|
||||
if err != nil {
|
||||
p.badAmberRedPermissions.error(
|
||||
"Unexpected Error %v when trying to fetch: %s", err, url)
|
||||
} else if res.StatusCode == http.StatusOK {
|
||||
p.badAmberRedPermissions.error(
|
||||
"Advisory %s of TLP level %v is not properly access protected.",
|
||||
url, label)
|
||||
}
|
||||
}
|
||||
|
||||
case csaf.TLPLabelWhite:
|
||||
// If we found a white labeled document we need to track it
|
||||
// to find out later if there was an unprotected way to access it.
|
||||
|
||||
p.badWhitePermissions.use()
|
||||
// Being not able to extract the identifier from the document
|
||||
// indicates that the document is not valid. Should not happen
|
||||
// as the schema validation passed before.
|
||||
p.invalidAdvisories.use()
|
||||
if id, err := p.extractAdvisoryIdentifier(doc); err != nil {
|
||||
p.invalidAdvisories.error("Bad document %s: %v", url, err)
|
||||
} else if !lc.whiteAdvisories[id] {
|
||||
// Only do check if we haven't seen it as accessible before.
|
||||
|
||||
if !p.usedAuthorizedClient() {
|
||||
// We already downloaded it without protection
|
||||
lc.whiteAdvisories[id] = true
|
||||
} else {
|
||||
// Need to try to re-download it unauthorized.
|
||||
if resp, err := p.unauthorizedClient().Get(url); err == nil {
|
||||
accessible := resp.StatusCode == http.StatusOK
|
||||
lc.whiteAdvisories[id] = accessible
|
||||
// If we are in a white rolie feed or in a dirlisting
|
||||
// directly warn if we cannot access it.
|
||||
// The cases of being in an amber or red feed are resolved.
|
||||
if !accessible &&
|
||||
(lc.feedLabel == "" || lc.feedLabel == csaf.TLPLabelWhite) {
|
||||
p.badWhitePermissions.warn(
|
||||
"Advisory %s of TLP level WHITE is access-protected.", url)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// add registers a given url to a label.
|
||||
func (lc *labelChecker) add(label csaf.TLPLabel, url string) {
|
||||
advs := lc.advisories[label]
|
||||
if advs == nil {
|
||||
advs = util.Set[string]{}
|
||||
lc.advisories[label] = advs
|
||||
}
|
||||
advs.Add(url)
|
||||
}
|
||||
|
||||
// checkRank tests if a given advisory is contained by the
|
||||
// the right feed color.
|
||||
func (lc *labelChecker) checkRank(
|
||||
p *processor,
|
||||
label csaf.TLPLabel,
|
||||
url string,
|
||||
) {
|
||||
// Only do this check when we are inside a ROLIE feed.
|
||||
if lc.feedLabel == "" {
|
||||
return
|
||||
}
|
||||
|
||||
switch advisoryRank, feedRank := tlpLevel(label), tlpLevel(lc.feedLabel); {
|
||||
|
||||
case advisoryRank < feedRank:
|
||||
if advisoryRank == 0 { // All kinds of 'UNLABELED'
|
||||
p.badROLIEFeed.info(
|
||||
"Found unlabeled advisory %q in feed %q.",
|
||||
url, lc.feedURL)
|
||||
} else {
|
||||
p.badROLIEFeed.warn(
|
||||
"Found advisory %q labled TLP:%s in feed %q (TLP:%s).",
|
||||
url, label,
|
||||
lc.feedURL, lc.feedLabel)
|
||||
}
|
||||
|
||||
case advisoryRank > feedRank:
|
||||
// Must not happen, give error
|
||||
p.badROLIEFeed.error(
|
||||
"%s of TLP level %s must not be listed in feed %s of TLP level %s",
|
||||
url, label, lc.feedURL, lc.feedLabel)
|
||||
}
|
||||
}
|
||||
|
||||
// defaults returns the value the pointer p references
// if p is not nil, and def otherwise.
func defaults[T any](p *T, def T) T {
	if p == nil {
		return def
	}
	return *p
}
|
||||
|
||||
// processROLIEFeeds goes through all ROLIE feeds and checks their
// integrity and completeness. It works in three phases:
//
//  1. Load the advisory URLs of every feed.
//  2. Check the integrity of each feed (category document, TLS,
//     file integrity) while tracking the feed's TLP label.
//  3. Check completeness: at least one WHITE/GREEN/unlabeled feed
//     must exist, and every TLP level with advisories should have a
//     feed covering all of them.
//
// Errors other than errContinue abort processing and are returned.
func (p *processor) processROLIEFeeds(feeds [][]csaf.Feed) error {
	p.badROLIEFeed.use()

	// Advisory files per feed, filled in phase 1, read in phases 2 and 3.
	advisories := map[*csaf.Feed][]csaf.AdvisoryFile{}

	// Phase 1: load all advisories urls.
	for _, fs := range feeds {
		for i := range fs {
			feed := &fs[i]
			if feed.URL == nil {
				continue
			}
			feedBase, err := url.Parse(string(*feed.URL))
			if err != nil {
				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
				continue
			}
			feedURL := feedBase.String()
			p.checkTLS(feedURL)

			advs, err := p.rolieFeedEntries(feedURL)
			if err != nil {
				if err != errContinue {
					return err
				}
				continue
			}
			advisories[feed] = advs
		}
	}

	// Phase 2: check for integrity.
	for _, fs := range feeds {
		for i := range fs {
			feed := &fs[i]
			if feed.URL == nil {
				continue
			}
			files := advisories[feed]
			if files == nil {
				// Feed could not be loaded in phase 1.
				continue
			}

			feedURL, err := url.Parse(string(*feed.URL))
			if err != nil {
				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
				continue
			}

			feedBase, err := util.BaseURL(feedURL)
			if err != nil {
				p.badProviderMetadata.error("Bad base path: %v", err)
				continue
			}

			label := defaults(feed.TLPLabel, csaf.TLPLabelUnlabeled)
			if err := p.categoryCheck(feedBase, label); err != nil {
				if err != errContinue {
					return err
				}
			}

			// Make the current feed known to the label checker.
			p.labelChecker.feedURL = feedURL.String()
			p.labelChecker.feedLabel = label

			// TODO: Issue a warning if we want check AMBER+ without an
			// authorizing client.

			if err := p.integrity(files, rolieMask, p.badProviderMetadata.add); err != nil {
				if err != errContinue {
					return err
				}
			}
		}
	}

	// Phase 3: Check for completeness.

	// TLP labels for which a feed covers all collected advisories.
	hasSummary := util.Set[csaf.TLPLabel]{}

	var (
		hasUnlabeled = false
		hasWhite     = false
		hasGreen     = false
	)

	for _, fs := range feeds {
		for i := range fs {
			feed := &fs[i]
			if feed.URL == nil {
				continue
			}
			files := advisories[feed]
			if files == nil {
				continue
			}

			feedBase, err := url.Parse(string(*feed.URL))
			if err != nil {
				p.badProviderMetadata.error("Invalid URL %s in feed: %v.", *feed.URL, err)
				continue
			}

			makeAbs := makeAbsolute(feedBase)
			label := defaults(feed.TLPLabel, csaf.TLPLabelUnlabeled)

			switch label {
			case csaf.TLPLabelUnlabeled:
				hasUnlabeled = true
			case csaf.TLPLabelWhite:
				hasWhite = true
			case csaf.TLPLabelGreen:
				hasGreen = true
			}

			// All URLs seen under this label during the whole run.
			reference := p.labelChecker.advisories[label]
			// NOTE: shadows the outer per-feed 'advisories' map on purpose;
			// this set holds the absolute URLs listed by this single feed.
			advisories := make(util.Set[string], len(reference))

			for _, adv := range files {
				u, err := url.Parse(adv.URL())
				if err != nil {
					p.badProviderMetadata.error(
						"Invalid URL %s in feed: %v.", *feed.URL, err)
					continue
				}
				advisories.Add(makeAbs(u).String())
			}
			if advisories.ContainsAll(reference) {
				hasSummary.Add(label)
			}
		}
	}

	if !hasWhite && !hasGreen && !hasUnlabeled {
		p.badROLIEFeed.error(
			"One ROLIE feed with a TLP:WHITE, TLP:GREEN or unlabeled tlp must exist, " +
				"but none were found.")
	}

	// Every TLP level with data should have at least one summary feed.
	for _, label := range []csaf.TLPLabel{
		csaf.TLPLabelUnlabeled,
		csaf.TLPLabelWhite,
		csaf.TLPLabelGreen,
		csaf.TLPLabelAmber,
		csaf.TLPLabelRed,
	} {
		if !hasSummary.Contains(label) && len(p.labelChecker.advisories[label]) > 0 {
			p.badROLIEFeed.warn(
				"ROLIE feed for TLP:%s has no accessible listed feed covering all advisories.",
				label)
		}
	}

	return nil
}
|
||||
|
||||
// categoryCheck checks for the existence of a feed's ROLIE category
// document ("category-<label>.json" next to the feed folder) and, if it
// exists, whether it contains distinguishing categories.
// Findings are recorded in p.badROLIECategory; fetch/parse problems
// yield errContinue so the caller can proceed with other feeds.
func (p *processor) categoryCheck(folderURL string, label csaf.TLPLabel) error {
	labelname := strings.ToLower(string(label))
	urlrc := folderURL + "category-" + labelname + ".json"

	p.badROLIECategory.use()
	client := p.httpClient()
	res, err := client.Get(urlrc)
	if err != nil {
		p.badROLIECategory.error(
			"Cannot fetch rolie category document %s: %v", urlrc, err)
		return errContinue
	}
	if res.StatusCode != http.StatusOK {
		// Missing category document is only a warning, not an error.
		p.badROLIECategory.warn("Fetching %s failed. Status code %d (%s)",
			urlrc, res.StatusCode, res.Status)
		return errContinue
	}
	// Closure scopes the deferred Body.Close to the load.
	rolieCategory, err := func() (*csaf.ROLIECategoryDocument, error) {
		defer res.Body.Close()
		return csaf.LoadROLIECategoryDocument(res.Body)
	}()

	if err != nil {
		p.badROLIECategory.error(
			"Loading ROLIE category document %s failed: %v.", urlrc, err)
		return errContinue
	}
	if len(rolieCategory.Categories.Category) == 0 {
		p.badROLIECategory.warn(
			"No distinguishing categories in ROLIE category document: %s", urlrc)
	}
	return nil
}
|
||||
|
||||
// serviceCheck checks if a ROLIE service document exists and if it does,
|
||||
// whether it contains all ROLIE feeds.
|
||||
func (p *processor) serviceCheck(feeds [][]csaf.Feed) error {
|
||||
// service category document should be next to the pmd
|
||||
pmdURL, err := url.Parse(p.pmdURL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
baseURL, err := util.BaseURL(pmdURL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
urls := baseURL + "service.json"
|
||||
|
||||
// load service document
|
||||
p.badROLIEService.use()
|
||||
|
||||
client := p.httpClient()
|
||||
res, err := client.Get(urls)
|
||||
if err != nil {
|
||||
p.badROLIEService.error(
|
||||
"Cannot fetch rolie service document %s: %v", urls, err)
|
||||
return errContinue
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
p.badROLIEService.warn("Fetching %s failed. Status code %d (%s)",
|
||||
urls, res.StatusCode, res.Status)
|
||||
return errContinue
|
||||
}
|
||||
|
||||
rolieService, err := func() (*csaf.ROLIEServiceDocument, error) {
|
||||
defer res.Body.Close()
|
||||
return csaf.LoadROLIEServiceDocument(res.Body)
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
p.badROLIEService.error(
|
||||
"Loading ROLIE service document %s failed: %v.", urls, err)
|
||||
return errContinue
|
||||
}
|
||||
|
||||
// Build lists of all feeds in feeds and in the Service Document
|
||||
var (
|
||||
sfeeds = util.Set[string]{}
|
||||
ffeeds = util.Set[string]{}
|
||||
)
|
||||
for _, col := range rolieService.Service.Workspace {
|
||||
for _, fd := range col.Collection {
|
||||
sfeeds.Add(fd.HRef)
|
||||
}
|
||||
}
|
||||
for _, r := range feeds {
|
||||
for _, s := range r {
|
||||
ffeeds.Add(string(*s.URL))
|
||||
}
|
||||
}
|
||||
|
||||
// Check if ROLIE Service Document contains exactly all ROLIE feeds
|
||||
if m1 := sfeeds.Difference(ffeeds).Keys(); len(m1) != 0 {
|
||||
sort.Strings(m1)
|
||||
p.badROLIEService.error(
|
||||
"The ROLIE service document %s contains nonexistent feed entries: %v", urls, m1)
|
||||
}
|
||||
if m2 := ffeeds.Difference(sfeeds).Keys(); len(m2) != 0 {
|
||||
sort.Strings(m2)
|
||||
p.badROLIEService.error(
|
||||
"The ROLIE service document %s is missing feed entries: %v", urls, m2)
|
||||
}
|
||||
|
||||
// TODO: Check conformity with RFC8322
|
||||
return nil
|
||||
}
|
||||
|
||||
// extractAdvisoryIdentifier extracts document/publisher/namespace and
|
||||
// document/tracking/id from advisory and stores it in an identifier.
|
||||
func (p *processor) extractAdvisoryIdentifier(doc any) (identifier, error) {
|
||||
namespace, err := p.expr.Eval(`$.document.publisher.namespace`, doc)
|
||||
if err != nil {
|
||||
return identifier{}, err
|
||||
}
|
||||
|
||||
idString, err := p.expr.Eval(`$.document.tracking.id`, doc)
|
||||
if err != nil {
|
||||
return identifier{}, err
|
||||
}
|
||||
|
||||
ns, ok := namespace.(string)
|
||||
if !ok {
|
||||
return identifier{}, errors.New("cannot extract 'namespace'")
|
||||
}
|
||||
id, ok := idString.(string)
|
||||
if !ok {
|
||||
return identifier{}, errors.New("cannot extract 'id'")
|
||||
}
|
||||
|
||||
return identifier{
|
||||
namespace: ns,
|
||||
id: id,
|
||||
}, nil
|
||||
}
|
||||
209
cmd/csaf_checker/rules.go
Normal file
209
cmd/csaf_checker/rules.go
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
)
|
||||
|
||||
// ruleCondition tells how the sub-rules of a requirementRules
// node are combined.
type ruleCondition int

const (
	condAll   ruleCondition = iota // All sub-rules have to be fulfilled.
	condOneOf                      // At least one sub-rule has to be fulfilled.
)

// requirementRules is a node in the tree of requirements for a role.
type requirementRules struct {
	cond      ruleCondition       // How subs are combined.
	satisfies int                 // Requirement number; 0 for inner nodes.
	subs      []*requirementRules // Child rules; empty for leaves.
}
|
||||
|
||||
// Rule trees for the roles defined by the CSAF standard.
// Each role includes the rules of the weaker role plus its own.
var (
	// publisherRules: requirements 1-4 are mandatory for publishers.
	publisherRules = &requirementRules{
		cond: condAll,
		subs: ruleAtoms(1, 2, 3, 4),
	}

	// providerRules: publisher rules plus 5-7, one of 8-10 and
	// either the directory-based (11-14) or ROLIE-based (15-17) set.
	providerRules = &requirementRules{
		cond: condAll,
		subs: []*requirementRules{
			publisherRules,
			{cond: condAll, subs: ruleAtoms(5, 6, 7)},
			{cond: condOneOf, subs: ruleAtoms(8, 9, 10)},
			{cond: condOneOf, subs: []*requirementRules{
				{cond: condAll, subs: ruleAtoms(11, 12, 13, 14)},
				{cond: condAll, subs: ruleAtoms(15, 16, 17)},
			}},
		},
	}

	// trustedProviderRules: provider rules plus 18-20.
	trustedProviderRules = &requirementRules{
		cond: condAll,
		subs: []*requirementRules{
			providerRules,
			{cond: condAll, subs: ruleAtoms(18, 19, 20)},
		},
	}
)
||||
|
||||
// roleRequirements returns the rules for the given role.
|
||||
func roleRequirements(role csaf.MetadataRole) *requirementRules {
|
||||
switch role {
|
||||
case csaf.MetadataRoleTrustedProvider:
|
||||
return trustedProviderRules
|
||||
case csaf.MetadataRoleProvider:
|
||||
return providerRules
|
||||
case csaf.MetadataRolePublisher:
|
||||
return publisherRules
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// ruleAtoms is a helper function to build the leaves of
|
||||
// a rules tree.
|
||||
func ruleAtoms(nums ...int) []*requirementRules {
|
||||
rules := make([]*requirementRules, len(nums))
|
||||
for i, num := range nums {
|
||||
rules[i] = &requirementRules{
|
||||
cond: condAll,
|
||||
satisfies: num,
|
||||
}
|
||||
}
|
||||
return rules
|
||||
}
|
||||
|
||||
// reporters assembles a list of reporters needed for a given set
|
||||
// of rules. The given nums are mandatory.
|
||||
func (rules *requirementRules) reporters(nums []int) []reporter {
|
||||
if rules == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
var recurse func(*requirementRules)
|
||||
recurse = func(rules *requirementRules) {
|
||||
if rules.satisfies != 0 {
|
||||
// There should not be any dupes.
|
||||
for _, n := range nums {
|
||||
if n == rules.satisfies {
|
||||
goto doRecurse
|
||||
}
|
||||
}
|
||||
nums = append(nums, rules.satisfies)
|
||||
}
|
||||
doRecurse:
|
||||
for _, sub := range rules.subs {
|
||||
recurse(sub)
|
||||
}
|
||||
}
|
||||
recurse(rules)
|
||||
|
||||
sort.Ints(nums)
|
||||
|
||||
reps := make([]reporter, len(nums))
|
||||
|
||||
for i, n := range nums {
|
||||
reps[i] = reporters[n]
|
||||
}
|
||||
return reps
|
||||
}
|
||||
|
||||
// eval evaluates a set of rules against a given processor state.
// A leaf (satisfies != 0) delegates to p.eval for that requirement;
// inner nodes combine their children with AND (condAll) or
// OR (condOneOf). A nil rule tree evaluates to false.
func (rules *requirementRules) eval(p *processor) bool {
	if rules == nil {
		return false
	}

	var recurse func(*requirementRules) bool

	recurse = func(rules *requirementRules) bool {
		if rules.satisfies != 0 {
			return p.eval(rules.satisfies)
		}
		switch rules.cond {
		case condAll:
			for _, sub := range rules.subs {
				if !recurse(sub) {
					return false
				}
			}
			return true
		case condOneOf:
			for _, sub := range rules.subs {
				if recurse(sub) {
					return true
				}
			}
			return false
		default:
			// Unreachable for well-formed trees built via ruleAtoms/literals.
			panic(fmt.Sprintf("unexpected cond %v in eval", rules.cond))
		}
	}

	return recurse(rules)
}
|
||||
|
||||
// eval evaluates the processing state for a given requirement number
// (1-20, as defined by the CSAF standard's provider requirements).
// A requirement counts as fulfilled when its associated message
// collector recorded no errors. Panics on an unknown number.
func (p *processor) eval(requirement int) bool {

	switch requirement {
	case 1:
		return !p.invalidAdvisories.hasErrors()
	case 2:
		return !p.badFilenames.hasErrors()
	case 3:
		return len(p.noneTLS) == 0
	case 4:
		return !p.badWhitePermissions.hasErrors()
	case 5:
		return !p.badAmberRedPermissions.hasErrors()
	// Currently, only domains using HTTP-Header redirects are checked.
	// A domain reaching evaluation will only have HTTP-Header redirects if any,
	// and thus requirement 6 will always be fulfilled.
	case 6:
		return true
	case 7:
		return !p.badProviderMetadata.hasErrors()
	case 8:
		return !p.badSecurity.hasErrors()
	case 9:
		return !p.badWellknownMetadata.hasErrors()
	case 10:
		return !p.badDNSPath.hasErrors()

	case 11:
		return !p.badFolders.hasErrors()
	case 12:
		return !p.badIndices.hasErrors()
	case 13:
		return !p.badChanges.hasErrors()
	case 14:
		return !p.badDirListings.hasErrors()

	case 15:
		return !p.badROLIEFeed.hasErrors()
	case 16:
		return !p.badROLIEService.hasErrors()
	case 17:
		return !p.badROLIECategory.hasErrors()

	case 18:
		return !p.badIntegrities.hasErrors()
	case 19:
		return !p.badSignatures.hasErrors()
	case 20:
		return !p.badPGPs.hasErrors()
	default:
		panic(fmt.Sprintf("evaluating unexpected requirement %d", requirement))
	}
}
|
||||
|
|
@ -9,6 +9,47 @@
|
|||
<h1>CSAF-Checker - Report</h1>
|
||||
{{- range .Domains }}
|
||||
<h2>{{ .Name }}{{ if .HasErrors }} (failed){{ end }}</h2>
|
||||
<p>
|
||||
{{ with .Publisher }}
|
||||
<fieldset>
|
||||
<legend>Publisher</legend>
|
||||
<table>
|
||||
{{ with .Category }}
|
||||
<tr>
|
||||
<td><strong>Category:</strong></td>
|
||||
<td>{{ . }}</td>
|
||||
</tr>
|
||||
{{ end }}
|
||||
{{ with .Name }}
|
||||
<tr>
|
||||
<td><strong>Name:</strong></td>
|
||||
<td>{{ . }}</td>
|
||||
</tr>
|
||||
{{ end }}
|
||||
{{ with .Namespace }}
|
||||
<tr>
|
||||
<td><strong>Namespace:</strong></td>
|
||||
<td>{{ . }}</td>
|
||||
</tr>
|
||||
{{ end }}
|
||||
{{ with .ContactDetails }}
|
||||
<tr>
|
||||
<td><strong>Contact Details:</strong></td>
|
||||
<td>{{ . }}</td>
|
||||
</tr>
|
||||
{{ end }}
|
||||
{{ with .IssuingAuthority }}
|
||||
<tr>
|
||||
<td><strong>Issuing Authority:</strong></td>
|
||||
<td>{{ . }}</td>
|
||||
</tr>
|
||||
{{ end }}
|
||||
</table>
|
||||
</fieldset>
|
||||
{{ end }}
|
||||
<br />
|
||||
{{ with .Role }}<strong>Role:</strong> {{ . }}{{ end }}
|
||||
</p>
|
||||
|
||||
<dl>
|
||||
{{ range .Requirements }}
|
||||
|
|
@ -21,8 +62,26 @@
|
|||
{{ end }}
|
||||
|
||||
<footer>
|
||||
Date of run: <time datetime="{{.Date}}">{{.Date}}</time>
|
||||
csaf_checker v<span class="version">{{.Version}}</span>
|
||||
<fieldset>
|
||||
<legend>Runtime</legend>
|
||||
<table>
|
||||
<tr>
|
||||
<td><strong>Date of run:</strong></td>
|
||||
<td><time datetime="{{ .Date.Format "2006-01-02T15:04:05Z"}}">{{ .Date.Local.Format "Monday, 02 Jan 2006 15:04:05 MST" }}</time></td>
|
||||
</tr>
|
||||
{{ if .TimeRange }}{{ with .TimeRange }}
|
||||
<tr>
|
||||
<td><strong>Time range:</strong></td>
|
||||
<td><time datetime="{{ (index . 0).Format "2006-01-02T15:04:05Z"}}">{{ (index . 0).Local.Format "Monday, 02 Jan 2006 15:04:05 MST" }}</time> -
|
||||
<time datetime="{{ (index . 1).Format "2006-01-02T15:04:05Z"}}">{{ (index . 1).Local.Format "Monday, 02 Jan 2006 15:04:05 MST" }}</time></td>
|
||||
</tr>
|
||||
{{ end }}{{ end }}
|
||||
<tr>
|
||||
<td><strong>Version:</strong></td>
|
||||
<td>csaf_checker v<span class="version">{{ .Version }}</span></td>
|
||||
</tr>
|
||||
</table>
|
||||
</fieldset>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
277
cmd/csaf_downloader/config.go
Normal file
277
cmd/csaf_downloader/config.go
Normal file
|
|
@ -0,0 +1,277 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/certs"
|
||||
"github.com/gocsaf/csaf/v3/internal/filter"
|
||||
"github.com/gocsaf/csaf/v3/internal/models"
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
)
|
||||
|
||||
// Defaults applied when the corresponding option is not given on the
// command line or in the config file.
const (
	defaultWorker         = 2
	defaultPreset         = "mandatory"
	defaultForwardQueue   = 5
	defaultValidationMode = validationStrict
	defaultLogFile        = "downloader.log"
	defaultLogLevel       = slog.LevelInfo
)
|
||||
|
||||
// validationMode selects how strictly downloaded documents are validated.
type validationMode string

const (
	validationStrict = validationMode("strict") // Reject invalid documents.
	validationUnsafe = validationMode("unsafe") // Keep invalid documents.
)
|
||||
|
||||
// hashAlgorithm names a checksum algorithm preferred for integrity checks.
type hashAlgorithm string

const (
	algSha256 = hashAlgorithm("sha256")
	algSha512 = hashAlgorithm("sha512")
)
|
||||
|
||||
// config holds all options of the csaf_downloader, settable via
// command line flags (go-flags struct tags) and/or a TOML config file.
// Fields tagged toml:"-" are command-line only.
type config struct {
	Directory            string            `short:"d" long:"directory" description:"DIRectory to store the downloaded files in" value-name:"DIR" toml:"directory"`
	Insecure             bool              `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"`
	IgnoreSignatureCheck bool              `long:"ignore_sigcheck" description:"Ignore signature check results, just warn on mismatch" toml:"ignore_sigcheck"`
	ClientCert           *string           `long:"client_cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE" toml:"client_cert"`
	ClientKey            *string           `long:"client_key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE" toml:"client_key"`
	ClientPassphrase     *string           `long:"client_passphrase" description:"Optional passphrase for the client cert (limited, experimental, see doc)" value-name:"PASSPHRASE" toml:"client_passphrase"`
	Version              bool              `long:"version" description:"Display version of the binary" toml:"-"`
	NoStore              bool              `long:"no_store" short:"n" description:"Do not store files" toml:"no_store"`
	Rate                 *float64          `long:"rate" short:"r" description:"The average upper limit of https operations per second (defaults to unlimited)" toml:"rate"`
	Worker               int               `long:"worker" short:"w" description:"NUMber of concurrent downloads" value-name:"NUM" toml:"worker"`
	Range                *models.TimeRange `long:"time_range" short:"t" description:"RANGE of time from which advisories to download" value-name:"RANGE" toml:"time_range"`
	Folder               string            `long:"folder" short:"f" description:"Download into a given subFOLDER" value-name:"FOLDER" toml:"folder"`
	IgnorePattern        []string          `long:"ignore_pattern" short:"i" description:"Do not download files if their URLs match any of the given PATTERNs" value-name:"PATTERN" toml:"ignore_pattern"`
	ExtraHeader          http.Header       `long:"header" short:"H" description:"One or more extra HTTP header fields" toml:"header"`

	EnumeratePMDOnly bool `long:"enumerate_pmd_only" description:"If this flag is set to true, the downloader will only enumerate valid provider metadata files, but not download documents" toml:"enumerate_pmd_only"`

	// Remote validation service options.
	RemoteValidator        string   `long:"validator" description:"URL to validate documents remotely" value-name:"URL" toml:"validator"`
	RemoteValidatorCache   string   `long:"validator_cache" description:"FILE to cache remote validations" value-name:"FILE" toml:"validator_cache"`
	RemoteValidatorPresets []string `long:"validator_preset" description:"One or more PRESETS to validate remotely" value-name:"PRESETS" toml:"validator_preset"`

	//lint:ignore SA5008 We are using choice twice: strict, unsafe.
	ValidationMode validationMode `long:"validation_mode" short:"m" choice:"strict" choice:"unsafe" value-name:"MODE" description:"MODE how strict the validation is" toml:"validation_mode"`

	// Options for forwarding downloaded documents to another endpoint.
	ForwardURL      string      `long:"forward_url" description:"URL of HTTP endpoint to forward downloads to" value-name:"URL" toml:"forward_url"`
	ForwardHeader   http.Header `long:"forward_header" description:"One or more extra HTTP header fields used by forwarding" toml:"forward_header"`
	ForwardQueue    int         `long:"forward_queue" description:"Maximal queue LENGTH before forwarder" value-name:"LENGTH" toml:"forward_queue"`
	ForwardInsecure bool        `long:"forward_insecure" description:"Do not check TLS certificates from forward endpoint" toml:"forward_insecure"`

	LogFile *string `long:"log_file" description:"FILE to log downloading to" value-name:"FILE" toml:"log_file"`
	//lint:ignore SA5008 We are using choice or than once: debug, info, warn, error
	LogLevel *options.LogLevel `long:"log_level" description:"LEVEL of logging details" value-name:"LEVEL" choice:"debug" choice:"info" choice:"warn" choice:"error" toml:"log_level"`

	Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"`

	// Derived, non-flag state filled in during argument processing.
	clientCerts   []tls.Certificate
	ignorePattern filter.PatternMatcher

	//lint:ignore SA5008 We are using choice or than once: sha256, sha512
	PreferredHash hashAlgorithm `long:"preferred_hash" choice:"sha256" choice:"sha512" value-name:"HASH" description:"HASH to prefer" toml:"preferred_hash"`
}
|
||||
|
||||
// configPaths are the potential file locations of the config file.
// They are probed in this order; presumably the first existing file
// wins — TODO confirm against options.Parser's lookup logic.
var configPaths = []string{
	"~/.config/csaf/downloader.toml",
	"~/.csaf_downloader.toml",
	"csaf_downloader.toml",
}
|
||||
|
||||
// parseArgsConfig parses the command line and, if needed, a config file.
// It returns the remaining positional arguments (the domains to download
// from), the resulting configuration, and any parse error.
func parseArgsConfig() ([]string, *config, error) {
	// Shared storage for the pointer-typed defaults so SetDefaults and
	// EnsureDefaults hand out the same backing values.
	var (
		logFile  = defaultLogFile
		logLevel = &options.LogLevel{Level: defaultLogLevel}
	)
	p := options.Parser[config]{
		DefaultConfigLocations: configPaths,
		ConfigLocation:         func(cfg *config) string { return cfg.Config },
		Usage:                  "[OPTIONS] domain...",
		HasVersion:             func(cfg *config) bool { return cfg.Version },
		// SetDefaults seeds a fresh config before any flags/TOML are applied.
		SetDefaults: func(cfg *config) {
			cfg.Worker = defaultWorker
			cfg.RemoteValidatorPresets = []string{defaultPreset}
			cfg.ValidationMode = defaultValidationMode
			cfg.ForwardQueue = defaultForwardQueue
			cfg.LogFile = &logFile
			cfg.LogLevel = logLevel
		},
		// Re-establish default values if not set.
		// This runs after config/flags were applied, so zero values here
		// mean "the user did not set it".
		EnsureDefaults: func(cfg *config) {
			if cfg.Worker == 0 {
				cfg.Worker = defaultWorker
			}
			if cfg.RemoteValidatorPresets == nil {
				cfg.RemoteValidatorPresets = []string{defaultPreset}
			}
			// Any unknown validation mode falls back to strict.
			switch cfg.ValidationMode {
			case validationStrict, validationUnsafe:
			default:
				cfg.ValidationMode = validationStrict
			}
			if cfg.LogFile == nil {
				cfg.LogFile = &logFile
			}
			if cfg.LogLevel == nil {
				cfg.LogLevel = logLevel
			}
		},
	}
	return p.Parse()
}
|
||||
|
||||
// UnmarshalText implements [encoding.TextUnmarshaler].
|
||||
func (vm *validationMode) UnmarshalText(text []byte) error {
|
||||
switch m := validationMode(text); m {
|
||||
case validationStrict, validationUnsafe:
|
||||
*vm = m
|
||||
default:
|
||||
return fmt.Errorf(`invalid value %q (expected "strict" or "unsafe)"`, m)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UnmarshalFlag implements [flags.UnmarshalFlag].
|
||||
func (vm *validationMode) UnmarshalFlag(value string) error {
|
||||
var v validationMode
|
||||
if err := v.UnmarshalText([]byte(value)); err != nil {
|
||||
return err
|
||||
}
|
||||
*vm = v
|
||||
return nil
|
||||
}
|
||||
|
||||
// ignoreURL returns true if the given URL should not be downloaded,
// i.e. it matches one of the compiled --ignore_pattern expressions.
func (cfg *config) ignoreURL(u string) bool {
	return cfg.ignorePattern.Matches(u)
}
|
||||
|
||||
// verbose reports whether the configured log level is debug or finer.
func (cfg *config) verbose() bool {
	return cfg.LogLevel.Level <= slog.LevelDebug
}
|
||||
|
||||
// prepareDirectory ensures that the working directory
|
||||
// exists and is setup properly.
|
||||
func (cfg *config) prepareDirectory() error {
|
||||
// If not given use current working directory.
|
||||
if cfg.Directory == "" {
|
||||
dir, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.Directory = dir
|
||||
return nil
|
||||
}
|
||||
// Use given directory
|
||||
if _, err := os.Stat(cfg.Directory); err != nil {
|
||||
// If it does not exist create it.
|
||||
if os.IsNotExist(err) {
|
||||
if err = os.MkdirAll(cfg.Directory, 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// dropSubSeconds drops all parts below resolution of seconds.
|
||||
func dropSubSeconds(_ []string, a slog.Attr) slog.Attr {
|
||||
if a.Key == slog.TimeKey {
|
||||
t := a.Value.Time()
|
||||
a.Value = slog.TimeValue(t.Truncate(time.Second))
|
||||
}
|
||||
return a
|
||||
}
|
||||
|
||||
// prepareLogging sets up the structured logging.
// Without a log file everything goes to STDERR; otherwise the file is
// opened in append mode (relative paths resolve inside cfg.Directory,
// so prepareDirectory must have run first). The resulting JSON handler
// is installed as the process-wide slog default.
func (cfg *config) prepareLogging() error {
	var w io.Writer
	if cfg.LogFile == nil || *cfg.LogFile == "" {
		log.Println("using STDERR for logging")
		w = os.Stderr
	} else {
		var fname string
		// We put the log inside the download folder
		// if it is not absolute.
		if filepath.IsAbs(*cfg.LogFile) {
			fname = *cfg.LogFile
		} else {
			fname = filepath.Join(cfg.Directory, *cfg.LogFile)
		}
		f, err := os.OpenFile(fname, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644)
		if err != nil {
			return err
		}
		log.Printf("using %q for logging\n", fname)
		// NOTE(review): f is never closed; presumably it is meant to live
		// for the whole process lifetime — confirm.
		w = f
	}
	ho := slog.HandlerOptions{
		// AddSource: true,
		Level:       cfg.LogLevel.Level,
		ReplaceAttr: dropSubSeconds,
	}
	handler := slog.NewJSONHandler(w, &ho)
	logger := slog.New(handler)
	slog.SetDefault(logger)
	return nil
}
|
||||
|
||||
// compileIgnorePatterns compiles the configure patterns to be ignored.
|
||||
func (cfg *config) compileIgnorePatterns() error {
|
||||
pm, err := filter.NewPatternMatcher(cfg.IgnorePattern)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.ignorePattern = pm
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepareCertificates loads the client side certificates used by the HTTP client.
|
||||
func (cfg *config) prepareCertificates() error {
|
||||
cert, err := certs.LoadCertificate(
|
||||
cfg.ClientCert, cfg.ClientKey, cfg.ClientPassphrase)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.clientCerts = cert
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepare prepares internal state of a loaded configuration.
|
||||
func (cfg *config) prepare() error {
|
||||
for _, prepare := range []func(*config) error{
|
||||
(*config).prepareDirectory,
|
||||
(*config).prepareLogging,
|
||||
(*config).prepareCertificates,
|
||||
(*config).compileIgnorePatterns,
|
||||
} {
|
||||
if err := prepare(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
880
cmd/csaf_downloader/downloader.go
Normal file
880
cmd/csaf_downloader/downloader.go
Normal file
|
|
@ -0,0 +1,880 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022, 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022, 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"golang.org/x/time/rate"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// hashFetchInfo describes one checksum file to fetch for an advisory.
type hashFetchInfo struct {
	url       string        // location of the checksum file
	preferred bool          // matches the user-configured preferred hash algorithm
	warn      bool          // log a fetch failure as warning (info otherwise)
	hashType  hashAlgorithm // algorithm the file contains (sha256/sha512)
}
|
||||
|
||||
// downloader holds the shared state of one download run.
// It is used concurrently by the download workers; the mutexes guard
// the pieces of state that workers touch.
type downloader struct {
	cfg       *config
	client    *util.Client // Used for testing
	keys      *crypto.KeyRing      // public OpenPGP keys collected from the PMD
	validator csaf.RemoteValidator // optional remote validator (nil if unconfigured)
	forwarder *forwarder           // optional forwarding target (nil if unconfigured)
	mkdirMu   sync.Mutex           // serializes directory creation across workers
	statsMu   sync.Mutex           // guards stats
	stats     stats                // aggregated counters from all workers
}
|
||||
|
||||
// failedValidationDir is the name of the sub folder
// where advisories are stored that fail validation in
// unsafe mode.
const failedValidationDir = "failed_validation"
|
||||
|
||||
func newDownloader(cfg *config) (*downloader, error) {
|
||||
var validator csaf.RemoteValidator
|
||||
|
||||
if cfg.RemoteValidator != "" {
|
||||
validatorOptions := csaf.RemoteValidatorOptions{
|
||||
URL: cfg.RemoteValidator,
|
||||
Presets: cfg.RemoteValidatorPresets,
|
||||
Cache: cfg.RemoteValidatorCache,
|
||||
}
|
||||
var err error
|
||||
if validator, err = validatorOptions.Open(); err != nil {
|
||||
return nil, fmt.Errorf(
|
||||
"preparing remote validator failed: %w", err)
|
||||
}
|
||||
validator = csaf.SynchronizedRemoteValidator(validator)
|
||||
}
|
||||
|
||||
return &downloader{
|
||||
cfg: cfg,
|
||||
validator: validator,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *downloader) close() {
|
||||
if d.validator != nil {
|
||||
d.validator.Close()
|
||||
d.validator = nil
|
||||
}
|
||||
}
|
||||
|
||||
// addStats folds a worker's local stats into the downloader's total,
// holding statsMu because workers call this concurrently.
func (d *downloader) addStats(o *stats) {
	d.statsMu.Lock()
	defer d.statsMu.Unlock()
	d.stats.add(o)
}
|
||||
|
||||
// logRedirect logs redirects of the http client.
|
||||
func logRedirect(req *http.Request, via []*http.Request) error {
|
||||
vs := make([]string, len(via))
|
||||
for i, v := range via {
|
||||
vs[i] = v.URL.String()
|
||||
}
|
||||
slog.Debug("Redirecting",
|
||||
"to", req.URL.String(),
|
||||
"via", strings.Join(vs, " -> "))
|
||||
return nil
|
||||
}
|
||||
|
||||
// httpClient builds the layered HTTP client used for all downloads:
// base client (TLS config, proxy, redirect logging) → optional test
// override → extra-header wrapper → optional debug logging → optional
// rate limiting. The wrapping order matters: the limiter is outermost
// so every request, including logged ones, is throttled.
func (d *downloader) httpClient() util.Client {
	hClient := http.Client{}

	if d.cfg.verbose() {
		hClient.CheckRedirect = logRedirect
	}

	var tlsConfig tls.Config
	if d.cfg.Insecure {
		tlsConfig.InsecureSkipVerify = true
	}

	if len(d.cfg.clientCerts) != 0 {
		tlsConfig.Certificates = d.cfg.clientCerts
	}

	hClient.Transport = &http.Transport{
		TLSClientConfig: &tlsConfig,
		Proxy:           http.ProxyFromEnvironment,
	}

	client := util.Client(&hClient)

	// Overwrite for testing purposes
	if d.client != nil {
		client = *d.client
	}

	// Add extra headers.
	client = &util.HeaderClient{
		Client: client,
		Header: d.cfg.ExtraHeader,
	}

	// Add optional URL logging.
	if d.cfg.verbose() {
		client = &util.LoggingClient{
			Client: client,
			Log:    httpLog("downloader"),
		}
	}

	// Add optional rate limiting.
	if d.cfg.Rate != nil {
		client = &util.LimitingClient{
			Client:  client,
			Limiter: rate.NewLimiter(rate.Limit(*d.cfg.Rate), 1),
		}
	}

	return client
}
|
||||
|
||||
// httpLog returns a callback for [util.LoggingClient] that emits a
// structured debug record for each request, tagged with the caller's
// name.
func httpLog(who string) func(string, string) {
	return func(method, url string) {
		slog.Debug("http", "who", who, "method", method, "url", url)
	}
}
|
||||
|
||||
func (d *downloader) enumerate(domain string) error {
|
||||
client := d.httpClient()
|
||||
|
||||
loader := csaf.NewProviderMetadataLoader(client)
|
||||
lpmd := loader.Enumerate(domain)
|
||||
|
||||
docs := []any{}
|
||||
|
||||
for _, pmd := range lpmd {
|
||||
if d.cfg.verbose() {
|
||||
for i := range pmd.Messages {
|
||||
slog.Debug("Enumerating provider-metadata.json",
|
||||
"domain", domain,
|
||||
"message", pmd.Messages[i].Message)
|
||||
}
|
||||
}
|
||||
|
||||
docs = append(docs, pmd.Document)
|
||||
}
|
||||
|
||||
// print the results
|
||||
doc, err := json.MarshalIndent(docs, "", " ")
|
||||
if err != nil {
|
||||
slog.Error("Couldn't marshal PMD document json")
|
||||
}
|
||||
fmt.Println(string(doc))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// download fetches all advisories of a single domain: it loads and
// validates the provider-metadata.json, imports the advertised OpenPGP
// keys, and then processes every advisory file (optionally filtered by
// the configured time range) through downloadFiles.
func (d *downloader) download(ctx context.Context, domain string) error {
	client := d.httpClient()

	loader := csaf.NewProviderMetadataLoader(client)

	lpmd := loader.Load(domain)

	// Without a valid PMD there is nothing to download.
	if !lpmd.Valid() {
		for i := range lpmd.Messages {
			slog.Error("Loading provider-metadata.json",
				"domain", domain,
				"message", lpmd.Messages[i].Message)
		}
		return fmt.Errorf("no valid provider-metadata.json found for '%s'", domain)
	} else if d.cfg.verbose() {
		for i := range lpmd.Messages {
			slog.Debug("Loading provider-metadata.json",
				"domain", domain,
				"message", lpmd.Messages[i].Message)
		}
	}

	pmdURL, err := url.Parse(lpmd.URL)
	if err != nil {
		return fmt.Errorf("invalid URL '%s': %v", lpmd.URL, err)
	}

	expr := util.NewPathEval()

	// Import the public OpenPGP keys referenced by the PMD so signatures
	// can be checked later (best-effort; see loadOpenPGPKeys).
	if err := d.loadOpenPGPKeys(
		client,
		lpmd.Document,
		expr,
	); err != nil {
		return err
	}

	afp := csaf.NewAdvisoryFileProcessor(
		client,
		expr,
		lpmd.Document,
		pmdURL)

	// Do we need time range based filtering?
	if d.cfg.Range != nil {
		slog.Debug("Setting up filter to accept advisories within",
			"timerange", d.cfg.Range)
		afp.AgeAccept = d.cfg.Range.Contains
	}

	return afp.Process(func(label csaf.TLPLabel, files []csaf.AdvisoryFile) error {
		return d.downloadFiles(ctx, label, files)
	})
}
|
||||
|
||||
// downloadFiles distributes the advisory files of one TLP label over a
// pool of worker goroutines and collects their errors.
//
// Shutdown protocol: closing advisoryCh stops the workers, wg.Wait
// guarantees no worker will send on errorCh anymore, so errorCh can be
// closed, which lets the collector goroutine finish (signaled via
// errDone). Do not reorder these steps.
func (d *downloader) downloadFiles(
	ctx context.Context,
	label csaf.TLPLabel,
	files []csaf.AdvisoryFile,
) error {
	var (
		advisoryCh = make(chan csaf.AdvisoryFile)
		errorCh    = make(chan error)
		errDone    = make(chan struct{})
		errs       []error
		wg         sync.WaitGroup
	)

	// Collect errors from the workers in a single goroutine so errs
	// needs no locking.
	go func() {
		defer close(errDone)
		for err := range errorCh {
			errs = append(errs, err)
		}
	}()

	// At least one worker, even if the config says 0 or less.
	var n int
	if n = d.cfg.Worker; n < 1 {
		n = 1
	}

	for i := 0; i < n; i++ {
		wg.Add(1)
		go d.downloadWorker(ctx, &wg, label, advisoryCh, errorCh)
	}

allFiles:
	for _, file := range files {
		select {
		case advisoryCh <- file:
		case <-ctx.Done():
			// Stop feeding on cancellation; workers drain and exit below.
			break allFiles
		}
	}

	close(advisoryCh)
	wg.Wait()
	close(errorCh)
	<-errDone

	return errors.Join(errs...)
}
|
||||
|
||||
func (d *downloader) loadOpenPGPKeys(
|
||||
client util.Client,
|
||||
doc any,
|
||||
expr *util.PathEval,
|
||||
) error {
|
||||
src, err := expr.Eval("$.public_openpgp_keys", doc)
|
||||
if err != nil {
|
||||
// no keys.
|
||||
return nil
|
||||
}
|
||||
|
||||
var keys []csaf.PGPKey
|
||||
if err := util.ReMarshalJSON(&keys, src); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(keys) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Try to load
|
||||
|
||||
for i := range keys {
|
||||
key := &keys[i]
|
||||
if key.URL == nil {
|
||||
continue
|
||||
}
|
||||
u, err := url.Parse(*key.URL)
|
||||
if err != nil {
|
||||
slog.Warn("Invalid URL",
|
||||
"url", *key.URL,
|
||||
"error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
res, err := client.Get(u.String())
|
||||
if err != nil {
|
||||
slog.Warn(
|
||||
"Fetching public OpenPGP key failed",
|
||||
"url", u,
|
||||
"error", err)
|
||||
continue
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
slog.Warn(
|
||||
"Fetching public OpenPGP key failed",
|
||||
"url", u,
|
||||
"status_code", res.StatusCode,
|
||||
"status", res.Status)
|
||||
continue
|
||||
}
|
||||
|
||||
ckey, err := func() (*crypto.Key, error) {
|
||||
defer res.Body.Close()
|
||||
return crypto.NewKeyFromArmoredReader(res.Body)
|
||||
}()
|
||||
if err != nil {
|
||||
slog.Warn(
|
||||
"Reading public OpenPGP key failed",
|
||||
"url", u,
|
||||
"error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if !strings.EqualFold(ckey.GetFingerprint(), string(key.Fingerprint)) {
|
||||
slog.Warn(
|
||||
"Fingerprint of public OpenPGP key does not match remotely loaded",
|
||||
"url", u, "fingerprint", key.Fingerprint, "remote-fingerprint", ckey.GetFingerprint())
|
||||
continue
|
||||
}
|
||||
if d.keys == nil {
|
||||
if keyring, err := crypto.NewKeyRing(ckey); err != nil {
|
||||
slog.Warn(
|
||||
"Creating store for public OpenPGP key failed",
|
||||
"url", u,
|
||||
"error", err)
|
||||
} else {
|
||||
d.keys = keyring
|
||||
}
|
||||
} else {
|
||||
d.keys.AddKey(ckey)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// logValidationIssues logs the issues reported by the advisory schema validation.
|
||||
func (d *downloader) logValidationIssues(url string, errors []string, err error) {
|
||||
if err != nil {
|
||||
slog.Error("Failed to validate",
|
||||
"url", url,
|
||||
"error", err)
|
||||
return
|
||||
}
|
||||
if len(errors) > 0 {
|
||||
if d.cfg.verbose() {
|
||||
slog.Error("CSAF file has validation errors",
|
||||
"url", url,
|
||||
"error", strings.Join(errors, ", "))
|
||||
} else {
|
||||
slog.Error("CSAF file has validation errors",
|
||||
"url", url,
|
||||
"count", len(errors))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// downloadContext stores the per-worker state of a downloader.
// Each worker owns exactly one instance, so no locking is needed here.
type downloadContext struct {
	d                  *downloader
	client             util.Client  // worker-private HTTP client stack
	data               bytes.Buffer // reused buffer holding the current advisory
	lastDir            string       // last target directory, to skip repeated mkdirs
	initialReleaseDate time.Time    // extracted from the current advisory
	dateExtract        func(any) error // parses initial_release_date into the field above
	lower              string          // lower-cased TLP label, used as folder name
	stats              stats           // worker-local counters, merged on exit
	expr               *util.PathEval
}
|
||||
|
||||
func newDownloadContext(d *downloader, label csaf.TLPLabel) *downloadContext {
|
||||
dc := &downloadContext{
|
||||
d: d,
|
||||
client: d.httpClient(),
|
||||
lower: strings.ToLower(string(label)),
|
||||
expr: util.NewPathEval(),
|
||||
}
|
||||
dc.dateExtract = util.TimeMatcher(&dc.initialReleaseDate, time.RFC3339)
|
||||
return dc
|
||||
}
|
||||
|
||||
// downloadAdvisory downloads, validates and (unless --no_store) writes
// a single advisory plus its checksum and signature files.
//
// Most per-file problems are logged and swallowed (return nil) so one
// bad advisory does not abort the whole run; infrastructure errors
// (mkdir/write failures, remote-validator call failures) are reported
// via errorCh instead. A non-nil return terminates the worker.
func (dc *downloadContext) downloadAdvisory(
	file csaf.AdvisoryFile,
	errorCh chan<- error,
) error {
	u, err := url.Parse(file.URL())
	if err != nil {
		dc.stats.downloadFailed++
		slog.Warn("Ignoring invalid URL",
			"url", file.URL(),
			"error", err)
		return nil
	}

	// Skip URLs matching --ignore_pattern.
	if dc.d.cfg.ignoreURL(file.URL()) {
		slog.Debug("Ignoring URL", "url", file.URL())
		return nil
	}

	// Ignore not conforming filenames.
	filename := filepath.Base(u.Path)
	if !util.ConformingFileName(filename) {
		dc.stats.filenameFailed++
		slog.Warn("Ignoring none conforming filename",
			"filename", filename)
		return nil
	}

	resp, err := dc.client.Get(file.URL())
	if err != nil {
		dc.stats.downloadFailed++
		slog.Warn("Cannot GET",
			"url", file.URL(),
			"error", err)
		return nil
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		dc.stats.downloadFailed++
		slog.Warn("Cannot load",
			"url", file.URL(),
			"status", resp.Status,
			"status_code", resp.StatusCode)
		return nil
	}

	// Warn if we do not get JSON.
	// NOTE(review): this is an exact match, so a parameterized value like
	// "application/json; charset=utf-8" also triggers the warning.
	if ct := resp.Header.Get("Content-Type"); ct != "application/json" {
		slog.Warn("Content type is not 'application/json'",
			"url", file.URL(),
			"content_type", ct)
	}

	var (
		writers                    []io.Writer
		s256, s512                 hash.Hash
		s256Data, s512Data         []byte
		remoteSHA256, remoteSHA512 []byte
		signData                   []byte
	)

	// Collect the checksum files we should try to fetch.
	hashToFetch := []hashFetchInfo{}
	if file.SHA512URL() != "" {
		hashToFetch = append(hashToFetch, hashFetchInfo{
			url:       file.SHA512URL(),
			warn:      true,
			hashType:  algSha512,
			preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha512)),
		})
	} else {
		slog.Info("SHA512 not present")
	}
	if file.SHA256URL() != "" {
		hashToFetch = append(hashToFetch, hashFetchInfo{
			url:       file.SHA256URL(),
			warn:      true,
			hashType:  algSha256,
			preferred: strings.EqualFold(string(dc.d.cfg.PreferredHash), string(algSha256)),
		})
	} else {
		slog.Info("SHA256 not present")
	}
	// For directory providers missing hashes are expected, so downgrade
	// fetch failures to info.
	if file.IsDirectory() {
		for i := range hashToFetch {
			hashToFetch[i].warn = false
		}
	}

	remoteSHA256, s256Data, remoteSHA512, s512Data = loadHashes(dc.client, hashToFetch)
	// Only hash locally with algorithms we got a remote checksum for.
	if remoteSHA512 != nil {
		s512 = sha512.New()
		writers = append(writers, s512)
	}
	if remoteSHA256 != nil {
		s256 = sha256.New()
		writers = append(writers, s256)
	}

	// Remember the data as we need to store it to file later.
	dc.data.Reset()
	writers = append(writers, &dc.data)

	// Download the advisory and hash it.
	hasher := io.MultiWriter(writers...)

	var doc any

	tee := io.TeeReader(resp.Body, hasher)

	if err := misc.StrictJSONParse(tee, &doc); err != nil {
		dc.stats.downloadFailed++
		slog.Warn("Downloading failed",
			"url", file.URL(),
			"error", err)
		return nil
	}

	// Compare the checksums.
	s256Check := func() error {
		if s256 != nil && !bytes.Equal(s256.Sum(nil), remoteSHA256) {
			dc.stats.sha256Failed++
			return fmt.Errorf("SHA256 checksum of %s does not match", file.URL())
		}
		return nil
	}

	s512Check := func() error {
		if s512 != nil && !bytes.Equal(s512.Sum(nil), remoteSHA512) {
			dc.stats.sha512Failed++
			return fmt.Errorf("SHA512 checksum of %s does not match", file.URL())
		}
		return nil
	}

	// Validate OpenPGP signature.
	keysCheck := func() error {
		// Only check signature if we have loaded keys.
		if dc.d.keys == nil {
			return nil
		}
		var sign *crypto.PGPSignature
		// Assigns the outer signData on purpose: the signature bytes are
		// written to disk later.
		sign, signData, err = loadSignature(dc.client, file.SignURL())
		if err != nil {
			slog.Warn("Downloading signature failed",
				"url", file.SignURL(),
				"error", err)
		}
		if sign != nil {
			if err := dc.d.checkSignature(dc.data.Bytes(), sign); err != nil {
				if !dc.d.cfg.IgnoreSignatureCheck {
					dc.stats.signatureFailed++
					return fmt.Errorf("cannot verify signature for %s: %v", file.URL(), err)
				}
			}
		}
		return nil
	}

	// Validate against CSAF schema.
	schemaCheck := func() error {
		if errors, err := csaf.ValidateCSAF(doc); err != nil || len(errors) > 0 {
			dc.stats.schemaFailed++
			dc.d.logValidationIssues(file.URL(), errors, err)
			return fmt.Errorf("schema validation for %q failed", file.URL())
		}
		return nil
	}

	// Validate if filename is conforming.
	filenameCheck := func() error {
		if err := util.IDMatchesFilename(dc.expr, doc, filename); err != nil {
			dc.stats.filenameFailed++
			return fmt.Errorf("filename not conforming %s: %s", file.URL(), err)
		}
		return nil
	}

	// Validate against remote validator.
	remoteValidatorCheck := func() error {
		if dc.d.validator == nil {
			return nil
		}
		rvr, err := dc.d.validator.Validate(doc)
		if err != nil {
			errorCh <- fmt.Errorf(
				"calling remote validator on %q failed: %w",
				file.URL(), err)
			return nil
		}
		if !rvr.Valid {
			dc.stats.remoteFailed++
			return fmt.Errorf("remote validation of %q failed", file.URL())
		}
		return nil
	}

	// Run all the validations.
	// In strict mode the first failing check drops the advisory; in
	// unsafe mode all checks run and the file is kept, but flagged.
	valStatus := notValidatedValidationStatus
	for _, check := range []func() error{
		s256Check,
		s512Check,
		keysCheck,
		schemaCheck,
		filenameCheck,
		remoteValidatorCheck,
	} {
		if err := check(); err != nil {
			slog.Error("Validation check failed", "error", err)
			valStatus.update(invalidValidationStatus)
			if dc.d.cfg.ValidationMode == validationStrict {
				return nil
			}
		}
	}
	valStatus.update(validValidationStatus)

	// Send to forwarder
	if dc.d.forwarder != nil {
		dc.d.forwarder.forward(
			filename, dc.data.String(),
			valStatus,
			string(s256Data),
			string(s512Data))
	}

	if dc.d.cfg.NoStore {
		// Do not write locally.
		if valStatus == validValidationStatus {
			dc.stats.succeeded++
		}
		return nil
	}

	// The initial release date determines the year sub folder; fall back
	// to "now" if it cannot be extracted.
	if err := dc.expr.Extract(
		`$.document.tracking.initial_release_date`, dc.dateExtract, false, doc,
	); err != nil {
		slog.Warn("Cannot extract initial_release_date from advisory",
			"url", file.URL())
		dc.initialReleaseDate = time.Now()
	}
	dc.initialReleaseDate = dc.initialReleaseDate.UTC()

	// Advisories that failed validation are stored in a special folder.
	var newDir string
	if valStatus != validValidationStatus {
		newDir = path.Join(dc.d.cfg.Directory, failedValidationDir)
	} else {
		newDir = dc.d.cfg.Directory
	}

	// Do we have a configured destination folder?
	if dc.d.cfg.Folder != "" {
		newDir = path.Join(newDir, dc.d.cfg.Folder)
	} else {
		newDir = path.Join(newDir, dc.lower, strconv.Itoa(dc.initialReleaseDate.Year()))
	}

	// mkdir only when the target directory changed since the last file.
	if newDir != dc.lastDir {
		if err := dc.d.mkdirAll(newDir, 0755); err != nil {
			errorCh <- err
			return nil
		}
		dc.lastDir = newDir
	}

	// Write advisory to file
	path := filepath.Join(dc.lastDir, filename)

	// Write data to disk.
	for _, x := range []struct {
		p string
		d []byte
	}{
		{path, dc.data.Bytes()},
		{path + ".sha256", s256Data},
		{path + ".sha512", s512Data},
		{path + ".asc", signData},
	} {
		if x.d != nil {
			if err := os.WriteFile(x.p, x.d, 0644); err != nil {
				errorCh <- err
				return nil
			}
		}
	}

	dc.stats.succeeded++
	slog.Info("Written advisory", "path", path)
	return nil
}
|
||||
|
||||
func (d *downloader) downloadWorker(
|
||||
ctx context.Context,
|
||||
wg *sync.WaitGroup,
|
||||
label csaf.TLPLabel,
|
||||
files <-chan csaf.AdvisoryFile,
|
||||
errorCh chan<- error,
|
||||
) {
|
||||
defer wg.Done()
|
||||
|
||||
dc := newDownloadContext(d, label)
|
||||
|
||||
// Add collected stats back to total.
|
||||
defer d.addStats(&dc.stats)
|
||||
|
||||
for {
|
||||
var file csaf.AdvisoryFile
|
||||
var ok bool
|
||||
select {
|
||||
case file, ok = <-files:
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
case <-ctx.Done():
|
||||
return
|
||||
}
|
||||
if err := dc.downloadAdvisory(file, errorCh); err != nil {
|
||||
slog.Error("download terminated", "error", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// mkdirAll wraps os.MkdirAll with a mutex so concurrent workers do not
// race when creating the same target directory tree.
func (d *downloader) mkdirAll(path string, perm os.FileMode) error {
	d.mkdirMu.Lock()
	defer d.mkdirMu.Unlock()
	return os.MkdirAll(path, perm)
}
|
||||
|
||||
// checkSignature verifies the detached OpenPGP signature of data
// against the keys collected from the provider metadata, using the
// current time for validity checks.
func (d *downloader) checkSignature(data []byte, sign *crypto.PGPSignature) error {
	pm := crypto.NewPlainMessage(data)
	t := crypto.GetUnixTime()
	return d.keys.VerifyDetached(pm, sign, t)
}
|
||||
|
||||
func loadSignature(client util.Client, p string) (*crypto.PGPSignature, []byte, error) {
|
||||
resp, err := client.Get(p)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, nil, fmt.Errorf(
|
||||
"fetching signature from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode)
|
||||
}
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
sign, err := crypto.NewPGPSignatureFromArmored(string(data))
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return sign, data, nil
|
||||
}
|
||||
|
||||
func loadHashes(client util.Client, hashes []hashFetchInfo) ([]byte, []byte, []byte, []byte) {
|
||||
var remoteSha256, remoteSha512, sha256Data, sha512Data []byte
|
||||
|
||||
// Load preferred hashes first
|
||||
slices.SortStableFunc(hashes, func(a, b hashFetchInfo) int {
|
||||
if a.preferred == b.preferred {
|
||||
return 0
|
||||
}
|
||||
if a.preferred && !b.preferred {
|
||||
return -1
|
||||
}
|
||||
return 1
|
||||
})
|
||||
for _, h := range hashes {
|
||||
if remote, data, err := loadHash(client, h.url); err != nil {
|
||||
if h.warn {
|
||||
slog.Warn("Cannot fetch hash",
|
||||
"hash", h.hashType,
|
||||
"url", h.url,
|
||||
"error", err)
|
||||
} else {
|
||||
slog.Info("Hash not present", "hash", h.hashType, "file", h.url)
|
||||
}
|
||||
} else {
|
||||
switch h.hashType {
|
||||
case algSha512:
|
||||
{
|
||||
remoteSha512 = remote
|
||||
sha512Data = data
|
||||
}
|
||||
case algSha256:
|
||||
{
|
||||
remoteSha256 = remote
|
||||
sha256Data = data
|
||||
}
|
||||
}
|
||||
if h.preferred {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return remoteSha256, sha256Data, remoteSha512, sha512Data
|
||||
}
|
||||
|
||||
func loadHash(client util.Client, p string) ([]byte, []byte, error) {
|
||||
resp, err := client.Get(p)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, nil, fmt.Errorf(
|
||||
"fetching hash from '%s' failed: %s (%d)", p, resp.Status, resp.StatusCode)
|
||||
}
|
||||
var data bytes.Buffer
|
||||
tee := io.TeeReader(resp.Body, &data)
|
||||
hash, err := util.HashFromReader(tee)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return hash, data.Bytes(), nil
|
||||
}
|
||||
|
||||
// run performs the downloads for all the given domains.
|
||||
func (d *downloader) run(ctx context.Context, domains []string) error {
|
||||
defer d.stats.log()
|
||||
for _, domain := range domains {
|
||||
if err := d.download(ctx, domain); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// runEnumerate performs the enumeration of PMDs for all the given domains.
|
||||
func (d *downloader) runEnumerate(domains []string) error {
|
||||
defer d.stats.log()
|
||||
for _, domain := range domains {
|
||||
if err := d.enumerate(domain); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
160
cmd/csaf_downloader/downloader_test.go
Normal file
160
cmd/csaf_downloader/downloader_test.go
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"log/slog"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
"github.com/gocsaf/csaf/v3/internal/testutil"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// checkIfFileExists reports whether path exists. Any stat error other
// than "file does not exist" aborts the test.
func checkIfFileExists(path string, t *testing.T) bool {
	_, err := os.Stat(path)
	switch {
	case err == nil:
		return true
	case errors.Is(err, os.ErrNotExist):
		return false
	default:
		t.Fatalf("Failed to check if file exists: %v", err)
		return false // unreachable; Fatalf stops the test
	}
}
|
||||
|
||||
func TestShaMarking(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
directoryProvider bool
|
||||
wantSha256 bool
|
||||
wantSha512 bool
|
||||
enableSha256 bool
|
||||
enableSha512 bool
|
||||
preferredHash hashAlgorithm
|
||||
}{
|
||||
{
|
||||
name: "want sha256 and sha512",
|
||||
directoryProvider: false,
|
||||
wantSha256: true,
|
||||
wantSha512: true,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
},
|
||||
{
|
||||
name: "only want sha256",
|
||||
directoryProvider: false,
|
||||
wantSha256: true,
|
||||
wantSha512: false,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
preferredHash: algSha256,
|
||||
},
|
||||
{
|
||||
name: "only want sha512",
|
||||
directoryProvider: false,
|
||||
wantSha256: false,
|
||||
wantSha512: true,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
preferredHash: algSha512,
|
||||
},
|
||||
{
|
||||
name: "only want sha512",
|
||||
directoryProvider: false,
|
||||
wantSha256: false,
|
||||
wantSha512: true,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
preferredHash: algSha512,
|
||||
},
|
||||
|
||||
{
|
||||
name: "only deliver sha256",
|
||||
directoryProvider: false,
|
||||
wantSha256: true,
|
||||
wantSha512: false,
|
||||
enableSha256: true,
|
||||
enableSha512: false,
|
||||
preferredHash: algSha512,
|
||||
},
|
||||
{
|
||||
name: "only want sha256, directory provider",
|
||||
directoryProvider: true,
|
||||
wantSha256: true,
|
||||
wantSha512: false,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
preferredHash: algSha256,
|
||||
},
|
||||
{
|
||||
name: "only want sha512, directory provider",
|
||||
directoryProvider: true,
|
||||
wantSha256: false,
|
||||
wantSha512: true,
|
||||
enableSha256: true,
|
||||
enableSha512: true,
|
||||
preferredHash: algSha512,
|
||||
},
|
||||
}
|
||||
|
||||
t.Parallel()
|
||||
for _, testToRun := range tests {
|
||||
test := testToRun
|
||||
t.Run(test.name, func(tt *testing.T) {
|
||||
tt.Parallel()
|
||||
serverURL := ""
|
||||
params := testutil.ProviderParams{
|
||||
URL: "",
|
||||
EnableSha256: test.enableSha256,
|
||||
EnableSha512: test.enableSha512,
|
||||
}
|
||||
server := httptest.NewTLSServer(testutil.ProviderHandler(¶ms, test.directoryProvider))
|
||||
defer server.Close()
|
||||
|
||||
serverURL = server.URL
|
||||
params.URL = server.URL
|
||||
|
||||
hClient := server.Client()
|
||||
client := util.Client(hClient)
|
||||
|
||||
tempDir := t.TempDir()
|
||||
cfg := config{LogLevel: &options.LogLevel{Level: slog.LevelDebug}, Directory: tempDir, PreferredHash: test.preferredHash}
|
||||
err := cfg.prepare()
|
||||
if err != nil {
|
||||
t.Fatalf("SHA marking config failed: %v", err)
|
||||
}
|
||||
d, err := newDownloader(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("could not init downloader: %v", err)
|
||||
}
|
||||
d.client = &client
|
||||
|
||||
ctx := context.Background()
|
||||
err = d.run(ctx, []string{serverURL + "/provider-metadata.json"})
|
||||
if err != nil {
|
||||
t.Errorf("SHA marking %v: Expected no error, got: %v", test.name, err)
|
||||
}
|
||||
d.close()
|
||||
|
||||
// Check for downloaded hashes
|
||||
sha256Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha256", t)
|
||||
sha512Exists := checkIfFileExists(tempDir+"/white/2020/avendor-advisory-0004.json.sha512", t)
|
||||
|
||||
if sha256Exists != test.wantSha256 {
|
||||
t.Errorf("%v: expected sha256 hash present to be %v, got: %v", test.name, test.wantSha256, sha256Exists)
|
||||
}
|
||||
|
||||
if sha512Exists != test.wantSha512 {
|
||||
t.Errorf("%v: expected sha512 hash present to be %v, got: %v", test.name, test.wantSha512, sha512Exists)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
281
cmd/csaf_downloader/forwarder.go
Normal file
281
cmd/csaf_downloader/forwarder.go
Normal file
|
|
@ -0,0 +1,281 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"io"
|
||||
"log/slog"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// failedForwardDir is the name of the special sub folder
// where advisories get stored which fail forwarding.
const failedForwardDir = "failed_forward"

// validationStatus represents the validation status
// known to the HTTP endpoint.
type validationStatus string

const (
	validValidationStatus        = validationStatus("valid")
	invalidValidationStatus      = validationStatus("invalid")
	notValidatedValidationStatus = validationStatus("not_validated")
)

// update transitions the status to the given one.
// Cannot heal after it fails at least once: an invalid
// status is sticky and ignores further updates.
func (vs *validationStatus) update(status validationStatus) {
	if *vs == invalidValidationStatus {
		return
	}
	*vs = status
}
|
||||
|
||||
// forwarder forwards downloaded advisories to a given
|
||||
// HTTP endpoint.
|
||||
type forwarder struct {
|
||||
cfg *config
|
||||
cmds chan func(*forwarder)
|
||||
client util.Client
|
||||
|
||||
failed int
|
||||
succeeded int
|
||||
}
|
||||
|
||||
// newForwarder creates a new forwarder.
|
||||
func newForwarder(cfg *config) *forwarder {
|
||||
queue := cfg.ForwardQueue
|
||||
if queue < 1 {
|
||||
queue = 1
|
||||
}
|
||||
return &forwarder{
|
||||
cfg: cfg,
|
||||
cmds: make(chan func(*forwarder), queue),
|
||||
}
|
||||
}
|
||||
|
||||
// run runs the forwarder. Meant to be used in a Go routine.
|
||||
func (f *forwarder) run() {
|
||||
defer slog.Debug("forwarder done")
|
||||
|
||||
for cmd := range f.cmds {
|
||||
cmd(f)
|
||||
}
|
||||
}
|
||||
|
||||
// close terminates the forwarder.
|
||||
func (f *forwarder) close() {
|
||||
close(f.cmds)
|
||||
}
|
||||
|
||||
// log logs the current statistics.
|
||||
func (f *forwarder) log() {
|
||||
f.cmds <- func(f *forwarder) {
|
||||
slog.Info("Forward statistics",
|
||||
"succeeded", f.succeeded,
|
||||
"failed", f.failed)
|
||||
}
|
||||
}
|
||||
|
||||
// httpClient returns a cached HTTP client used for uploading
|
||||
// the advisories to the configured HTTP endpoint.
|
||||
func (f *forwarder) httpClient() util.Client {
|
||||
if f.client != nil {
|
||||
return f.client
|
||||
}
|
||||
|
||||
hClient := http.Client{}
|
||||
|
||||
var tlsConfig tls.Config
|
||||
if f.cfg.ForwardInsecure {
|
||||
tlsConfig.InsecureSkipVerify = true
|
||||
}
|
||||
|
||||
hClient.Transport = &http.Transport{
|
||||
TLSClientConfig: &tlsConfig,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
client := util.Client(&hClient)
|
||||
|
||||
// Add extra headers.
|
||||
client = &util.HeaderClient{
|
||||
Client: client,
|
||||
Header: f.cfg.ForwardHeader,
|
||||
}
|
||||
|
||||
// Add optional URL logging.
|
||||
if f.cfg.verbose() {
|
||||
client = &util.LoggingClient{
|
||||
Client: client,
|
||||
Log: httpLog("forwarder"),
|
||||
}
|
||||
}
|
||||
|
||||
f.client = client
|
||||
return f.client
|
||||
}
|
||||
|
||||
// replaceExt swaps the extension of fname (as determined by
// filepath.Ext) for nExt.
func replaceExt(fname, nExt string) string {
	return strings.TrimSuffix(fname, filepath.Ext(fname)) + nExt
}
|
||||
|
||||
// buildRequest creates an HTTP request suited to forward the given advisory.
|
||||
func (f *forwarder) buildRequest(
|
||||
filename, doc string,
|
||||
status validationStatus,
|
||||
sha256, sha512 string,
|
||||
) (*http.Request, error) {
|
||||
body := new(bytes.Buffer)
|
||||
writer := multipart.NewWriter(body)
|
||||
|
||||
var err error
|
||||
part := func(name, fname, mimeType, content string) {
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if fname == "" {
|
||||
err = writer.WriteField(name, content)
|
||||
return
|
||||
}
|
||||
var w io.Writer
|
||||
if w, err = misc.CreateFormFile(writer, name, fname, mimeType); err == nil {
|
||||
_, err = w.Write([]byte(content))
|
||||
}
|
||||
}
|
||||
|
||||
base := filepath.Base(filename)
|
||||
part("advisory", base, "application/json", doc)
|
||||
part("validation_status", "", "text/plain", string(status))
|
||||
if sha256 != "" {
|
||||
part("hash-256", replaceExt(base, ".sha256"), "text/plain", sha256)
|
||||
}
|
||||
if sha512 != "" {
|
||||
part("hash-512", replaceExt(base, ".sha512"), "text/plain", sha512)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := writer.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, f.cfg.ForwardURL, body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
contentType := writer.FormDataContentType()
|
||||
req.Header.Set("Content-Type", contentType)
|
||||
return req, nil
|
||||
}
|
||||
|
||||
// storeFailedAdvisory stores an advisory in a special folder
|
||||
// in case the forwarding failed.
|
||||
func (f *forwarder) storeFailedAdvisory(filename, doc, sha256, sha512 string) error {
|
||||
// Create special folder if it does not exist.
|
||||
dir := filepath.Join(f.cfg.Directory, failedForwardDir)
|
||||
if err := os.MkdirAll(dir, 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
// Store parts which are not empty.
|
||||
for _, x := range []struct {
|
||||
p string
|
||||
d string
|
||||
}{
|
||||
{filename, doc},
|
||||
{filename + ".sha256", sha256},
|
||||
{filename + ".sha512", sha512},
|
||||
} {
|
||||
if len(x.d) != 0 {
|
||||
path := filepath.Join(dir, x.p)
|
||||
if err := os.WriteFile(path, []byte(x.d), 0644); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// storeFailed is a logging wrapper around storeFailedAdvisory.
|
||||
func (f *forwarder) storeFailed(filename, doc, sha256, sha512 string) {
|
||||
f.failed++
|
||||
if err := f.storeFailedAdvisory(filename, doc, sha256, sha512); err != nil {
|
||||
slog.Error("Storing advisory failed forwarding failed",
|
||||
"error", err)
|
||||
}
|
||||
}
|
||||
|
||||
// limitedString reads max bytes from reader and returns it as a string.
|
||||
// Longer strings are indicated by "..." as a suffix.
|
||||
func limitedString(r io.Reader, maxLength int) (string, error) {
|
||||
var msg strings.Builder
|
||||
if _, err := io.Copy(&msg, io.LimitReader(r, int64(maxLength))); err != nil {
|
||||
return "", err
|
||||
}
|
||||
if msg.Len() >= maxLength {
|
||||
msg.WriteString("...")
|
||||
}
|
||||
return msg.String(), nil
|
||||
}
|
||||
|
||||
// forward sends a given document with filename, status and
|
||||
// checksums to the forwarder. This is async to the degree
|
||||
// till the configured queue size is filled.
|
||||
func (f *forwarder) forward(
|
||||
filename, doc string,
|
||||
status validationStatus,
|
||||
sha256, sha512 string,
|
||||
) {
|
||||
// Run this in the main loop of the forwarder.
|
||||
f.cmds <- func(f *forwarder) {
|
||||
req, err := f.buildRequest(filename, doc, status, sha256, sha512)
|
||||
if err != nil {
|
||||
slog.Error("building forward Request failed",
|
||||
"error", err)
|
||||
f.storeFailed(filename, doc, sha256, sha512)
|
||||
return
|
||||
}
|
||||
res, err := f.httpClient().Do(req)
|
||||
if err != nil {
|
||||
slog.Error("sending forward request failed",
|
||||
"error", err)
|
||||
f.storeFailed(filename, doc, sha256, sha512)
|
||||
return
|
||||
}
|
||||
if res.StatusCode != http.StatusCreated {
|
||||
defer res.Body.Close()
|
||||
if msg, err := limitedString(res.Body, 512); err != nil {
|
||||
slog.Error("reading forward result failed",
|
||||
"error", err)
|
||||
} else {
|
||||
slog.Error("forwarding failed",
|
||||
"filename", filename,
|
||||
"body", msg,
|
||||
"status_code", res.StatusCode)
|
||||
}
|
||||
f.storeFailed(filename, doc, sha256, sha512)
|
||||
} else {
|
||||
f.succeeded++
|
||||
slog.Debug(
|
||||
"forwarding succeeded",
|
||||
"filename", filename)
|
||||
}
|
||||
}
|
||||
}
|
||||
429
cmd/csaf_downloader/forwarder_test.go
Normal file
429
cmd/csaf_downloader/forwarder_test.go
Normal file
|
|
@ -0,0 +1,429 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"log/slog"
|
||||
"mime"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func TestValidationStatusUpdate(t *testing.T) {
|
||||
sv := validValidationStatus
|
||||
sv.update(invalidValidationStatus)
|
||||
sv.update(validValidationStatus)
|
||||
if sv != invalidValidationStatus {
|
||||
t.Fatalf("got %q expected %q", sv, invalidValidationStatus)
|
||||
}
|
||||
sv = notValidatedValidationStatus
|
||||
sv.update(validValidationStatus)
|
||||
sv.update(notValidatedValidationStatus)
|
||||
if sv != notValidatedValidationStatus {
|
||||
t.Fatalf("got %q expected %q", sv, notValidatedValidationStatus)
|
||||
}
|
||||
}
|
||||
|
||||
func TestForwarderLogStats(t *testing.T) {
|
||||
orig := slog.Default()
|
||||
defer slog.SetDefault(orig)
|
||||
|
||||
var buf bytes.Buffer
|
||||
h := slog.NewJSONHandler(&buf, &slog.HandlerOptions{
|
||||
Level: slog.LevelInfo,
|
||||
})
|
||||
lg := slog.New(h)
|
||||
slog.SetDefault(lg)
|
||||
|
||||
cfg := &config{}
|
||||
fw := newForwarder(cfg)
|
||||
fw.failed = 11
|
||||
fw.succeeded = 13
|
||||
|
||||
done := make(chan struct{})
|
||||
go func() {
|
||||
defer close(done)
|
||||
fw.run()
|
||||
}()
|
||||
fw.log()
|
||||
fw.close()
|
||||
<-done
|
||||
|
||||
type fwStats struct {
|
||||
Msg string `json:"msg"`
|
||||
Succeeded int `json:"succeeded"`
|
||||
Failed int `json:"failed"`
|
||||
}
|
||||
sc := bufio.NewScanner(bytes.NewReader(buf.Bytes()))
|
||||
found := false
|
||||
for sc.Scan() {
|
||||
var fws fwStats
|
||||
if err := json.Unmarshal(sc.Bytes(), &fws); err != nil {
|
||||
t.Fatalf("JSON parsing log failed: %v", err)
|
||||
}
|
||||
if fws.Msg == "Forward statistics" &&
|
||||
fws.Failed == 11 &&
|
||||
fws.Succeeded == 13 {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
t.Fatalf("scanning log failed: %v", err)
|
||||
}
|
||||
if !found {
|
||||
t.Fatal("Cannot find forward statistics in log")
|
||||
}
|
||||
}
|
||||
|
||||
func TestForwarderHTTPClient(t *testing.T) {
|
||||
cfg := &config{
|
||||
ForwardInsecure: true,
|
||||
ForwardHeader: http.Header{
|
||||
"User-Agent": []string{"curl/7.55.1"},
|
||||
},
|
||||
LogLevel: &options.LogLevel{Level: slog.LevelDebug},
|
||||
}
|
||||
fw := newForwarder(cfg)
|
||||
if c1, c2 := fw.httpClient(), fw.httpClient(); c1 != c2 {
|
||||
t.Fatal("expected to return same client twice")
|
||||
}
|
||||
}
|
||||
|
||||
func TestForwarderReplaceExtension(t *testing.T) {
|
||||
for _, x := range [][2]string{
|
||||
{"foo", "foo.ext"},
|
||||
{"foo.bar", "foo.ext"},
|
||||
{".bar", ".ext"},
|
||||
{"", ".ext"},
|
||||
} {
|
||||
if got := replaceExt(x[0], ".ext"); got != x[1] {
|
||||
t.Fatalf("got %q expected %q", got, x[1])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestForwarderBuildRequest(t *testing.T) {
|
||||
|
||||
// Good case ...
|
||||
cfg := &config{
|
||||
ForwardURL: "https://example.com",
|
||||
}
|
||||
fw := newForwarder(cfg)
|
||||
|
||||
req, err := fw.buildRequest(
|
||||
"test.json", "{}",
|
||||
invalidValidationStatus,
|
||||
"256",
|
||||
"512")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("buildRequest failed: %v", err)
|
||||
}
|
||||
mediaType, params, err := mime.ParseMediaType(req.Header.Get("Content-Type"))
|
||||
if err != nil {
|
||||
t.Fatalf("no Content-Type found")
|
||||
}
|
||||
if !strings.HasPrefix(mediaType, "multipart/") {
|
||||
t.Fatalf("media type is not multipart")
|
||||
}
|
||||
mr := multipart.NewReader(req.Body, params["boundary"])
|
||||
|
||||
var foundAdvisory, foundValidationStatus, found256, found512 bool
|
||||
|
||||
for {
|
||||
p, err := mr.NextPart()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("parsing multipart failed: %v", err)
|
||||
}
|
||||
data, err := io.ReadAll(p)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cd := p.Header["Content-Disposition"]
|
||||
if len(cd) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
switch contains := func(name string) bool {
|
||||
return strings.Contains(cd[0], `name="`+name+`"`)
|
||||
}; {
|
||||
case contains("advisory"):
|
||||
if a := string(data); a != "{}" {
|
||||
t.Fatalf("advisory: got %q expected %q", a, "{}")
|
||||
}
|
||||
foundAdvisory = true
|
||||
case contains("validation_status"):
|
||||
if vs := validationStatus(data); vs != invalidValidationStatus {
|
||||
t.Fatalf("validation_status: got %q expected %q",
|
||||
vs, invalidValidationStatus)
|
||||
}
|
||||
foundValidationStatus = true
|
||||
case contains("hash-256"):
|
||||
if h := string(data); h != "256" {
|
||||
t.Fatalf("hash-256: got %q expected %q", h, "256")
|
||||
}
|
||||
found256 = true
|
||||
case contains("hash-512"):
|
||||
if h := string(data); h != "512" {
|
||||
t.Fatalf("hash-512: got %q expected %q", h, "512")
|
||||
}
|
||||
found512 = true
|
||||
}
|
||||
}
|
||||
|
||||
switch {
|
||||
case !foundAdvisory:
|
||||
t.Fatal("advisory not found")
|
||||
case !foundValidationStatus:
|
||||
t.Fatal("validation_status not found")
|
||||
case !found256:
|
||||
t.Fatal("hash-256 not found")
|
||||
case !found512:
|
||||
t.Fatal("hash-512 not found")
|
||||
}
|
||||
|
||||
// Bad case ...
|
||||
cfg.ForwardURL = "%"
|
||||
|
||||
if _, err := fw.buildRequest(
|
||||
"test.json", "{}",
|
||||
invalidValidationStatus,
|
||||
"256",
|
||||
"512",
|
||||
); err == nil {
|
||||
t.Fatal("bad forward URL should result in an error")
|
||||
}
|
||||
}
|
||||
|
||||
type badReader struct{ error }
|
||||
|
||||
func (br *badReader) Read([]byte) (int, error) { return 0, br.error }
|
||||
|
||||
func TestLimitedString(t *testing.T) {
|
||||
for _, x := range [][2]string{
|
||||
{"xx", "xx"},
|
||||
{"xxx", "xxx..."},
|
||||
{"xxxx", "xxx..."},
|
||||
} {
|
||||
got, err := limitedString(strings.NewReader(x[0]), 3)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if got != x[1] {
|
||||
t.Fatalf("got %q expected %q", got, x[1])
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := limitedString(&badReader{error: os.ErrInvalid}, 3); err == nil {
|
||||
t.Fatal("expected to fail with an error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStoreFailedAdvisory(t *testing.T) {
|
||||
dir, err := os.MkdirTemp("", "storeFailedAdvisory")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
cfg := &config{Directory: dir}
|
||||
fw := newForwarder(cfg)
|
||||
|
||||
badDir := filepath.Join(dir, failedForwardDir)
|
||||
if err := os.WriteFile(badDir, []byte("test"), 0664); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := fw.storeFailedAdvisory("advisory.json", "{}", "256", "512"); err == nil {
|
||||
t.Fatal("if the destination exists as a file an error should occur")
|
||||
}
|
||||
|
||||
if err := os.Remove(badDir); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := fw.storeFailedAdvisory("advisory.json", "{}", "256", "512"); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
sha256Path := filepath.Join(dir, failedForwardDir, "advisory.json.sha256")
|
||||
|
||||
// Write protect advisory.
|
||||
if err := os.Chmod(sha256Path, 0); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := fw.storeFailedAdvisory("advisory.json", "{}", "256", "512"); err == nil {
|
||||
t.Fatal("expected to fail with an error")
|
||||
}
|
||||
|
||||
if err := os.Chmod(sha256Path, 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStoredFailed(t *testing.T) {
|
||||
dir, err := os.MkdirTemp("", "storeFailed")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
orig := slog.Default()
|
||||
defer slog.SetDefault(orig)
|
||||
|
||||
var buf bytes.Buffer
|
||||
h := slog.NewJSONHandler(&buf, &slog.HandlerOptions{
|
||||
Level: slog.LevelError,
|
||||
})
|
||||
lg := slog.New(h)
|
||||
slog.SetDefault(lg)
|
||||
|
||||
cfg := &config{Directory: dir}
|
||||
fw := newForwarder(cfg)
|
||||
|
||||
// An empty filename should lead to an error.
|
||||
fw.storeFailed("", "{}", "256", "512")
|
||||
|
||||
if fw.failed != 1 {
|
||||
t.Fatalf("got %d expected 1", fw.failed)
|
||||
}
|
||||
|
||||
type entry struct {
|
||||
Msg string `json:"msg"`
|
||||
Level string `json:"level"`
|
||||
}
|
||||
|
||||
sc := bufio.NewScanner(bytes.NewReader(buf.Bytes()))
|
||||
found := false
|
||||
for sc.Scan() {
|
||||
var e entry
|
||||
if err := json.Unmarshal(sc.Bytes(), &e); err != nil {
|
||||
t.Fatalf("JSON parsing log failed: %v", err)
|
||||
}
|
||||
if e.Msg == "Storing advisory failed forwarding failed" && e.Level == "ERROR" {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
t.Fatalf("scanning log failed: %v", err)
|
||||
}
|
||||
if !found {
|
||||
t.Fatal("Cannot error logging statistics in log")
|
||||
}
|
||||
}
|
||||
|
||||
type fakeClient struct {
|
||||
util.Client
|
||||
state int
|
||||
}
|
||||
|
||||
func (fc *fakeClient) Do(*http.Request) (*http.Response, error) {
|
||||
// The different states simulates different responses from the remote API.
|
||||
switch fc.state {
|
||||
case 0:
|
||||
fc.state = 1
|
||||
return &http.Response{
|
||||
Status: http.StatusText(http.StatusCreated),
|
||||
StatusCode: http.StatusCreated,
|
||||
}, nil
|
||||
case 1:
|
||||
fc.state = 2
|
||||
return nil, errors.New("does not work")
|
||||
case 2:
|
||||
fc.state = 3
|
||||
return &http.Response{
|
||||
Status: http.StatusText(http.StatusBadRequest),
|
||||
StatusCode: http.StatusBadRequest,
|
||||
Body: io.NopCloser(&badReader{error: os.ErrInvalid}),
|
||||
}, nil
|
||||
default:
|
||||
return &http.Response{
|
||||
Status: http.StatusText(http.StatusBadRequest),
|
||||
StatusCode: http.StatusBadRequest,
|
||||
Body: io.NopCloser(strings.NewReader("This was bad!")),
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
|
||||
func TestForwarderForward(t *testing.T) {
|
||||
dir, err := os.MkdirTemp("", "forward")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
orig := slog.Default()
|
||||
defer slog.SetDefault(orig)
|
||||
|
||||
// We dont care in details here as we captured them
|
||||
// in the other test cases.
|
||||
h := slog.NewJSONHandler(io.Discard, nil)
|
||||
lg := slog.New(h)
|
||||
slog.SetDefault(lg)
|
||||
|
||||
cfg := &config{
|
||||
ForwardURL: "http://example.com",
|
||||
Directory: dir,
|
||||
}
|
||||
fw := newForwarder(cfg)
|
||||
|
||||
// Use the fact that http client is cached.
|
||||
fw.client = &fakeClient{}
|
||||
|
||||
done := make(chan struct{})
|
||||
|
||||
go func() {
|
||||
defer close(done)
|
||||
fw.run()
|
||||
}()
|
||||
|
||||
// Iterate through states of http client.
|
||||
for i := 0; i <= 3; i++ {
|
||||
fw.forward(
|
||||
"test.json", "{}",
|
||||
invalidValidationStatus,
|
||||
"256",
|
||||
"512")
|
||||
}
|
||||
|
||||
// Make buildRequest fail.
|
||||
wait := make(chan struct{})
|
||||
fw.cmds <- func(f *forwarder) {
|
||||
f.cfg.ForwardURL = "%"
|
||||
close(wait)
|
||||
}
|
||||
<-wait
|
||||
fw.forward(
|
||||
"test.json", "{}",
|
||||
invalidValidationStatus,
|
||||
"256",
|
||||
"512")
|
||||
|
||||
fw.close()
|
||||
|
||||
<-done
|
||||
}
|
||||
63
cmd/csaf_downloader/main.go
Normal file
63
cmd/csaf_downloader/main.go
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
// Package main implements the csaf_downloader tool.
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/signal"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
)
|
||||
|
||||
func run(cfg *config, domains []string) error {
|
||||
d, err := newDownloader(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer d.close()
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
ctx, stop := signal.NotifyContext(ctx, os.Interrupt)
|
||||
defer stop()
|
||||
|
||||
if cfg.ForwardURL != "" {
|
||||
f := newForwarder(cfg)
|
||||
go f.run()
|
||||
defer func() {
|
||||
f.log()
|
||||
f.close()
|
||||
}()
|
||||
d.forwarder = f
|
||||
}
|
||||
|
||||
// If the enumerate-only flag is set, enumerate found PMDs,
|
||||
// else use the normal load method
|
||||
if cfg.EnumeratePMDOnly {
|
||||
return d.runEnumerate(domains)
|
||||
}
|
||||
return d.run(ctx, domains)
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
domains, cfg, err := parseArgsConfig()
|
||||
options.ErrorCheck(err)
|
||||
options.ErrorCheck(cfg.prepare())
|
||||
|
||||
if len(domains) == 0 {
|
||||
slog.Warn("No domains given.")
|
||||
return
|
||||
}
|
||||
|
||||
options.ErrorCheck(run(cfg, domains))
|
||||
}
|
||||
59
cmd/csaf_downloader/stats.go
Normal file
59
cmd/csaf_downloader/stats.go
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import "log/slog"
|
||||
|
||||
// stats contains counters of the downloads.
type stats struct {
	downloadFailed  int
	filenameFailed  int
	schemaFailed    int
	remoteFailed    int
	sha256Failed    int
	sha512Failed    int
	signatureFailed int
	succeeded       int
}

// add adds other stats to this.
func (st *stats) add(o *stats) {
	st.succeeded += o.succeeded
	st.downloadFailed += o.downloadFailed
	st.filenameFailed += o.filenameFailed
	st.schemaFailed += o.schemaFailed
	st.remoteFailed += o.remoteFailed
	st.sha256Failed += o.sha256Failed
	st.sha512Failed += o.sha512Failed
	st.signatureFailed += o.signatureFailed
}

// totalFailed returns the sum of all failure counters.
func (st *stats) totalFailed() int {
	return st.downloadFailed +
		st.filenameFailed +
		st.schemaFailed +
		st.remoteFailed +
		st.sha256Failed +
		st.sha512Failed +
		st.signatureFailed
}

// log logs the collected stats.
func (st *stats) log() {
	slog.Info("Download statistics",
		"succeeded", st.succeeded,
		"total_failed", st.totalFailed(),
		"filename_failed", st.filenameFailed,
		"download_failed", st.downloadFailed,
		"schema_failed", st.schemaFailed,
		"remote_failed", st.remoteFailed,
		"sha256_failed", st.sha256Failed,
		"sha512_failed", st.sha512Failed,
		"signature_failed", st.signatureFailed)
}
|
||||
112
cmd/csaf_downloader/stats_test.go
Normal file
112
cmd/csaf_downloader/stats_test.go
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestStatsAdd(t *testing.T) {
|
||||
a := stats{
|
||||
downloadFailed: 2,
|
||||
filenameFailed: 3,
|
||||
schemaFailed: 5,
|
||||
remoteFailed: 7,
|
||||
sha256Failed: 11,
|
||||
sha512Failed: 13,
|
||||
signatureFailed: 17,
|
||||
succeeded: 19,
|
||||
}
|
||||
b := a
|
||||
a.add(&b)
|
||||
b.downloadFailed *= 2
|
||||
b.filenameFailed *= 2
|
||||
b.schemaFailed *= 2
|
||||
b.remoteFailed *= 2
|
||||
b.sha256Failed *= 2
|
||||
b.sha512Failed *= 2
|
||||
b.signatureFailed *= 2
|
||||
b.succeeded *= 2
|
||||
if a != b {
|
||||
t.Fatalf("%v != %v", a, b)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStatsTotalFailed(t *testing.T) {
|
||||
a := stats{
|
||||
downloadFailed: 2,
|
||||
filenameFailed: 3,
|
||||
schemaFailed: 5,
|
||||
remoteFailed: 7,
|
||||
sha256Failed: 11,
|
||||
sha512Failed: 13,
|
||||
signatureFailed: 17,
|
||||
}
|
||||
sum := a.downloadFailed +
|
||||
a.filenameFailed +
|
||||
a.schemaFailed +
|
||||
a.remoteFailed +
|
||||
a.sha256Failed +
|
||||
a.sha512Failed +
|
||||
a.signatureFailed
|
||||
if got := a.totalFailed(); got != sum {
|
||||
t.Fatalf("got %d expected %d", got, sum)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStatsLog(t *testing.T) {
|
||||
var out bytes.Buffer
|
||||
h := slog.NewJSONHandler(&out, &slog.HandlerOptions{Level: slog.LevelInfo})
|
||||
orig := slog.Default()
|
||||
defer slog.SetDefault(orig)
|
||||
slog.SetDefault(slog.New(h))
|
||||
a := stats{
|
||||
downloadFailed: 2,
|
||||
filenameFailed: 3,
|
||||
schemaFailed: 5,
|
||||
remoteFailed: 7,
|
||||
sha256Failed: 11,
|
||||
sha512Failed: 13,
|
||||
signatureFailed: 17,
|
||||
succeeded: 19,
|
||||
}
|
||||
a.log()
|
||||
type result struct {
|
||||
Succeeded int `json:"succeeded"`
|
||||
TotalFailed int `json:"total_failed"`
|
||||
FilenameFailed int `json:"filename_failed"`
|
||||
DownloadFailed int `json:"download_failed"`
|
||||
SchemaFailed int `json:"schema_failed"`
|
||||
RemoteFailed int `json:"remote_failed"`
|
||||
SHA256Failed int `json:"sha256_failed"`
|
||||
SHA512Failed int `json:"sha512_failed"`
|
||||
SignatureFailed int `json:"signature_failed"`
|
||||
}
|
||||
var got result
|
||||
if err := json.Unmarshal(out.Bytes(), &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
want := result{
|
||||
Succeeded: a.succeeded,
|
||||
TotalFailed: a.totalFailed(),
|
||||
FilenameFailed: a.filenameFailed,
|
||||
DownloadFailed: a.downloadFailed,
|
||||
SchemaFailed: a.schemaFailed,
|
||||
RemoteFailed: a.remoteFailed,
|
||||
SHA256Failed: a.sha256Failed,
|
||||
SHA512Failed: a.sha512Failed,
|
||||
SignatureFailed: a.signatureFailed,
|
||||
}
|
||||
if got != want {
|
||||
t.Fatalf("%v != %v", got, want)
|
||||
}
|
||||
}
|
||||
|
|
@ -1,10 +1,10 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
|
|
@ -14,6 +14,7 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
|
@ -21,9 +22,12 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/armor"
|
||||
"github.com/ProtonMail/gopenpgp/v2/constants"
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
const dateFormat = time.RFC3339
|
||||
|
|
@ -39,8 +43,13 @@ func (c *controller) loadCSAF(r *http.Request) (string, []byte, error) {
|
|||
}
|
||||
defer file.Close()
|
||||
|
||||
if !util.ConfirmingFileName(handler.Filename) {
|
||||
return "", nil, errors.New("given csaf filename is not confirming")
|
||||
// We reject everything which is not announced as JSON.
|
||||
if handler.Header.Get("Content-Type") != "application/json" {
|
||||
return "", nil, errors.New("expected content type 'application/json'")
|
||||
}
|
||||
|
||||
if !util.ConformingFileName(handler.Filename) {
|
||||
return "", nil, errors.New("given csaf filename is not conforming")
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
|
@ -112,7 +121,8 @@ func (c *controller) handleSignature(
|
|||
return "", nil, err
|
||||
}
|
||||
|
||||
armored, err := sig.GetArmored()
|
||||
armored, err := armor.ArmorWithTypeAndCustomHeaders(
|
||||
sig.Data, constants.PGPSignatureHeader, "", "")
|
||||
return armored, key, err
|
||||
}
|
||||
|
||||
|
|
@ -128,7 +138,7 @@ func (c *controller) tlpParam(r *http.Request) (tlp, error) {
|
|||
|
||||
// create calls the "ensureFolders" functions to create the directories and files.
|
||||
// It returns a struct by success, otherwise an error.
|
||||
func (c *controller) create(*http.Request) (interface{}, error) {
|
||||
func (c *controller) create(*http.Request) (any, error) {
|
||||
if err := ensureFolders(c.cfg); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -140,19 +150,19 @@ func (c *controller) create(*http.Request) (interface{}, error) {
|
|||
}, nil
|
||||
}
|
||||
|
||||
func (c *controller) upload(r *http.Request) (interface{}, error) {
|
||||
func (c *controller) upload(r *http.Request) (any, error) {
|
||||
|
||||
newCSAF, data, err := c.loadCSAF(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var content interface{}
|
||||
var content any
|
||||
if err := json.Unmarshal(data, &content); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Validate againt JSON schema.
|
||||
// Validate against JSON schema.
|
||||
if !c.cfg.NoValidation {
|
||||
validationErrors, err := csaf.ValidateCSAF(content)
|
||||
if err != nil {
|
||||
|
|
@ -164,10 +174,50 @@ func (c *controller) upload(r *http.Request) (interface{}, error) {
|
|||
}
|
||||
}
|
||||
|
||||
ex, err := csaf.NewAdvisorySummary(util.NewPathEval(), content)
|
||||
// Validate against remote validator.
|
||||
if c.cfg.RemoteValidator != nil {
|
||||
validator, err := c.cfg.RemoteValidator.Open()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rvr, err := validator.Validate(content)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !rvr.Valid {
|
||||
return nil, errors.New("does not validate against remote validator")
|
||||
}
|
||||
}
|
||||
|
||||
// Extract informations from the document.
|
||||
pe := util.NewPathEval()
|
||||
|
||||
ex, err := csaf.NewAdvisorySummary(pe, content)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if util.CleanFileName(ex.ID) != newCSAF {
|
||||
return nil, fmt.Errorf("ID %q does not match filename %s",
|
||||
ex.ID, newCSAF)
|
||||
}
|
||||
|
||||
// Check if we have to search for dynamic categories.
|
||||
var dynamicCategories []string
|
||||
if catExprs := c.cfg.DynamicCategories(); len(catExprs) > 0 {
|
||||
matcher := util.StringTreeMatcher(&dynamicCategories)
|
||||
|
||||
for _, expr := range catExprs {
|
||||
// Compile first to check that the expression is okay.
|
||||
if _, err := pe.Compile(expr); err != nil {
|
||||
log.Printf("Compiling category expression %q failed: %v\n",
|
||||
expr, err)
|
||||
continue
|
||||
}
|
||||
// Ignore errors here as they result from not matching.
|
||||
pe.Extract(expr, matcher, true, content)
|
||||
}
|
||||
}
|
||||
|
||||
t, err := c.tlpParam(r)
|
||||
if err != nil {
|
||||
|
|
@ -194,94 +244,21 @@ func (c *controller) upload(r *http.Request) (interface{}, error) {
|
|||
c.cfg, t,
|
||||
func(folder string, pmd *csaf.ProviderMetadata) error {
|
||||
|
||||
// Load the feed
|
||||
ts := string(t)
|
||||
feedName := "csaf-feed-tlp-" + ts + ".json"
|
||||
|
||||
feed := filepath.Join(folder, feedName)
|
||||
var rolie *csaf.ROLIEFeed
|
||||
if err := func() error {
|
||||
f, err := os.Open(feed)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
rolie, err = csaf.LoadROLIEFeed(f)
|
||||
return err
|
||||
}(); err != nil {
|
||||
// extend the ROLIE feed.
|
||||
if err := c.extendROLIE(folder, newCSAF, t, ex); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
feedURL := csaf.JSONURL(
|
||||
c.cfg.CanonicalURLPrefix +
|
||||
"/.well-known/csaf/" + ts + "/" + feedName)
|
||||
|
||||
tlpLabel := csaf.TLPLabel(strings.ToUpper(ts))
|
||||
|
||||
// Create new if does not exists.
|
||||
if rolie == nil {
|
||||
rolie = &csaf.ROLIEFeed{
|
||||
Feed: csaf.FeedData{
|
||||
ID: "csaf-feed-tlp-" + ts,
|
||||
Title: "CSAF feed (TLP:" + string(tlpLabel) + ")",
|
||||
Link: []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: string(feedURL),
|
||||
}},
|
||||
Category: []csaf.ROLIECategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
rolie.Feed.Updated = csaf.TimeStamp(time.Now())
|
||||
|
||||
year := strconv.Itoa(ex.InitialReleaseDate.Year())
|
||||
|
||||
csafURL := c.cfg.CanonicalURLPrefix +
|
||||
"/.well-known/csaf/" + ts + "/" + year + "/" + newCSAF
|
||||
|
||||
e := rolie.EntryByID(ex.ID)
|
||||
if e == nil {
|
||||
e = &csaf.Entry{ID: ex.ID}
|
||||
rolie.Feed.Entry = append(rolie.Feed.Entry, e)
|
||||
}
|
||||
|
||||
e.Titel = ex.Title
|
||||
e.Published = csaf.TimeStamp(ex.InitialReleaseDate)
|
||||
e.Updated = csaf.TimeStamp(ex.CurrentReleaseDate)
|
||||
e.Link = []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: csafURL,
|
||||
}}
|
||||
e.Format = csaf.Format{
|
||||
Schema: "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json",
|
||||
Version: "2.0",
|
||||
}
|
||||
e.Content = csaf.Content{
|
||||
Type: "application/json",
|
||||
Src: csafURL,
|
||||
}
|
||||
if ex.Summary != "" {
|
||||
e.Summary = &csaf.Summary{Content: ex.Summary}
|
||||
} else {
|
||||
e.Summary = nil
|
||||
}
|
||||
|
||||
// Sort by descending updated order.
|
||||
rolie.SortEntriesByUpdated()
|
||||
|
||||
// Store the feed
|
||||
if err := util.WriteToFile(feed, rolie); err != nil {
|
||||
// if we have found dynamic categories merge them into
|
||||
// the existing once.
|
||||
if len(dynamicCategories) > 0 {
|
||||
if err := c.mergeCategories(folder, t, dynamicCategories); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Create yearly subfolder
|
||||
year := strconv.Itoa(ex.InitialReleaseDate.Year())
|
||||
|
||||
subDir := filepath.Join(folder, year)
|
||||
|
||||
|
|
@ -302,12 +279,15 @@ func (c *controller) upload(r *http.Request) (interface{}, error) {
|
|||
return err
|
||||
}
|
||||
|
||||
// Only write index.txt and changes.csv if configured.
|
||||
if c.cfg.WriteIndices {
|
||||
if err := updateIndices(
|
||||
folder, filepath.Join(year, newCSAF),
|
||||
ex.CurrentReleaseDate,
|
||||
); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Take over publisher
|
||||
switch {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -11,25 +11,29 @@ package main
|
|||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
)
|
||||
|
||||
const (
|
||||
// The environment name, that contains the path to the config file.
|
||||
configEnv = "CSAF_CONFIG"
|
||||
configPrefix = "/usr/lib/csaf"
|
||||
configPrefix = "/etc/csaf"
|
||||
defaultConfigPath = configPrefix + "/config.toml" // Default path to the config file.
|
||||
defaultOpenPGPPrivateKey = configPrefix + "/openpgp_private.asc"
|
||||
defaultOpenPGPPublicKey = configPrefix + "/openpgp_public.asc"
|
||||
defaultFolder = "/var/www/" // Default folder path.
|
||||
defaultWeb = "/var/www/html" // Default web path.
|
||||
defaultNoWebUI = true
|
||||
defaultUploadLimit = 50 * 1024 * 1024 // Default limit size of the uploaded file.
|
||||
defaultServiceDocument = true
|
||||
)
|
||||
|
||||
type providerMetadataConfig struct {
|
||||
|
|
@ -48,6 +52,7 @@ type config struct {
|
|||
TLPs []tlp `toml:"tlps"`
|
||||
UploadSignature bool `toml:"upload_signature"`
|
||||
CanonicalURLPrefix string `toml:"canonical_url_prefix"`
|
||||
CertificateAndPassword bool `toml:"certificate_and_password"`
|
||||
NoPassphrase bool `toml:"no_passphrase"`
|
||||
NoValidation bool `toml:"no_validation"`
|
||||
NoWebUI bool `toml:"no_web_ui"`
|
||||
|
|
@ -55,6 +60,11 @@ type config struct {
|
|||
ProviderMetaData *providerMetadataConfig `toml:"provider_metadata"`
|
||||
UploadLimit *int64 `toml:"upload_limit"`
|
||||
Issuer *string `toml:"issuer"`
|
||||
RemoteValidator *csaf.RemoteValidatorOptions `toml:"remote_validator"`
|
||||
Categories *[]string `toml:"categories"`
|
||||
ServiceDocument bool `toml:"create_service_document"`
|
||||
WriteIndices bool `toml:"write_indices"`
|
||||
WriteSecurity bool `toml:"write_security"`
|
||||
}
|
||||
|
||||
func (pmdc *providerMetadataConfig) apply(pmd *csaf.ProviderMetadata) {
|
||||
|
|
@ -144,6 +154,68 @@ func (cfg *config) checkPassword(hash string) bool {
|
|||
bcrypt.CompareHashAndPassword([]byte(hash), []byte(*cfg.Password)) == nil
|
||||
}
|
||||
|
||||
// HasCategories tells if categories are configured.
|
||||
func (cfg *config) HasCategories() bool {
|
||||
return cfg.Categories != nil
|
||||
}
|
||||
|
||||
// categoryExprPrefix is the prefix for dynamic categories.
|
||||
const categoryExprPrefix = "expr:"
|
||||
|
||||
// HasDynamicCategories tells if dynamic categories are configured.
|
||||
func (cfg *config) HasDynamicCategories() bool {
|
||||
if !cfg.HasCategories() {
|
||||
return false
|
||||
}
|
||||
for _, cat := range *cfg.Categories {
|
||||
if strings.HasPrefix(cat, categoryExprPrefix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// HasStaticCategories tells if static categories are configured.
|
||||
func (cfg *config) HasStaticCategories() bool {
|
||||
if !cfg.HasCategories() {
|
||||
return false
|
||||
}
|
||||
for _, cat := range *cfg.Categories {
|
||||
if !strings.HasPrefix(cat, categoryExprPrefix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// StaticCategories returns a list on the configured static categories.
|
||||
func (cfg *config) StaticCategories() []string {
|
||||
if !cfg.HasCategories() {
|
||||
return nil
|
||||
}
|
||||
cats := make([]string, 0, len(*cfg.Categories))
|
||||
for _, cat := range *cfg.Categories {
|
||||
if !strings.HasPrefix(cat, categoryExprPrefix) {
|
||||
cats = append(cats, cat)
|
||||
}
|
||||
}
|
||||
return cats
|
||||
}
|
||||
|
||||
// DynamicCategories returns a list on the configured dynamic categories.
|
||||
func (cfg *config) DynamicCategories() []string {
|
||||
if !cfg.HasCategories() {
|
||||
return nil
|
||||
}
|
||||
cats := make([]string, 0, len(*cfg.Categories))
|
||||
for _, cat := range *cfg.Categories {
|
||||
if strings.HasPrefix(cat, categoryExprPrefix) {
|
||||
cats = append(cats, cat[len(categoryExprPrefix):])
|
||||
}
|
||||
}
|
||||
return cats
|
||||
}
|
||||
|
||||
// loadConfig extracts the config values from the config file. The path to the
|
||||
// file is taken either from environment variable "CSAF_CONFIG" or from the
|
||||
// defined default path in "defaultConfigPath".
|
||||
|
|
@ -154,11 +226,22 @@ func loadConfig() (*config, error) {
|
|||
if path == "" {
|
||||
path = defaultConfigPath
|
||||
}
|
||||
var cfg config
|
||||
if _, err := toml.DecodeFile(path, &cfg); err != nil {
|
||||
|
||||
// Preset defaults
|
||||
cfg := config{
|
||||
NoWebUI: defaultNoWebUI,
|
||||
ServiceDocument: defaultServiceDocument,
|
||||
}
|
||||
|
||||
md, err := toml.DecodeFile(path, &cfg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if undecoded := md.Undecoded(); len(undecoded) != 0 {
|
||||
return nil, fmt.Errorf("could not parse %q from config.toml", undecoded)
|
||||
}
|
||||
|
||||
// Preset defaults
|
||||
|
||||
if cfg.OpenPGPPrivateKey == "" {
|
||||
|
|
@ -180,6 +263,14 @@ func loadConfig() (*config, error) {
|
|||
if cfg.CanonicalURLPrefix == "" {
|
||||
cfg.CanonicalURLPrefix = "https://" + os.Getenv("SERVER_NAME")
|
||||
}
|
||||
// Check if canonical url prefix is invalid
|
||||
parsedURL, err := url.ParseRequestURI(cfg.CanonicalURLPrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if parsedURL.Scheme != "https" && parsedURL.Scheme != "http" {
|
||||
return nil, fmt.Errorf("invalid canonical URL: %q", cfg.CanonicalURLPrefix)
|
||||
}
|
||||
|
||||
if cfg.TLPs == nil {
|
||||
cfg.TLPs = []tlp{tlpCSAF, tlpWhite, tlpGreen, tlpAmber, tlpRed}
|
||||
|
|
@ -192,7 +283,7 @@ func loadConfig() (*config, error) {
|
|||
if cfg.ProviderMetaData.Publisher == nil {
|
||||
cfg.ProviderMetaData.Publisher = &csaf.Publisher{
|
||||
Category: func(c csaf.Category) *csaf.Category { return &c }(csaf.CSAFCategoryVendor),
|
||||
Name: func(s string) *string { return &s }("ACME"),
|
||||
Name: func(s string) *string { return &s }("Example Company"),
|
||||
Namespace: func(s string) *string { return &s }("https://example.com"),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -72,13 +72,9 @@ func (c *controller) bind(pim *pathInfoMux) {
|
|||
pim.handleFunc("/api/create", c.auth(api(c.create)))
|
||||
}
|
||||
|
||||
// auth wraps the given http.HandlerFunc and returns an new one after authenticating the
|
||||
// password contained in the header "X-CSAF-PROVIDER-AUTH" with the "password" config value
|
||||
// if set, otherwise returns the given http.HandlerFunc.
|
||||
func (c *controller) auth(
|
||||
fn func(http.ResponseWriter, *http.Request),
|
||||
) func(http.ResponseWriter, *http.Request) {
|
||||
return func(rw http.ResponseWriter, r *http.Request) {
|
||||
// authenticate checks if the incoming request conforms with the
|
||||
// configured authentication mechanism.
|
||||
func (c *controller) authenticate(r *http.Request) bool {
|
||||
|
||||
verify := os.Getenv("SSL_CLIENT_VERIFY")
|
||||
log.Printf("SSL_CLIENT_VERIFY: %s\n", verify)
|
||||
|
|
@ -90,19 +86,44 @@ func (c *controller) auth(
|
|||
log.Printf("SSL_CLIENT_I_DN: %s\n", os.Getenv("SSL_CLIENT_I_DN"))
|
||||
}
|
||||
|
||||
checkCert := func() bool {
|
||||
return verify == "SUCCESS" && (c.cfg.Issuer == nil || *c.cfg.Issuer == os.Getenv("SSL_CLIENT_I_DN"))
|
||||
}
|
||||
|
||||
checkPassword := func() bool {
|
||||
return c.cfg.checkPassword(r.Header.Get("X-CSAF-PROVIDER-AUTH"))
|
||||
}
|
||||
|
||||
if c.cfg.CertificateAndPassword {
|
||||
if c.cfg.Password == nil {
|
||||
log.Println("No password set, declining access.")
|
||||
return false
|
||||
}
|
||||
log.Printf("user: %s\n", os.Getenv("SSL_CLIENT_S_DN"))
|
||||
return checkPassword() && checkCert()
|
||||
}
|
||||
|
||||
switch {
|
||||
case verify == "SUCCESS" && (c.cfg.Issuer == nil || *c.cfg.Issuer == os.Getenv("SSL_CLIENT_I_DN")):
|
||||
case checkCert():
|
||||
log.Printf("user: %s\n", os.Getenv("SSL_CLIENT_S_DN"))
|
||||
case c.cfg.Password == nil:
|
||||
log.Println("No password set, declining access.")
|
||||
http.Error(rw, http.StatusText(http.StatusForbidden), http.StatusForbidden)
|
||||
return
|
||||
return false
|
||||
default:
|
||||
if pa := r.Header.Get("X-CSAF-PROVIDER-AUTH"); !c.cfg.checkPassword(pa) {
|
||||
return checkPassword()
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// auth is a middleware to decorate endpoints with authentication.
|
||||
func (c *controller) auth(
|
||||
fn func(http.ResponseWriter, *http.Request),
|
||||
) func(http.ResponseWriter, *http.Request) {
|
||||
return func(rw http.ResponseWriter, r *http.Request) {
|
||||
if !c.authenticate(r) {
|
||||
http.Error(rw, http.StatusText(http.StatusForbidden), http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
}
|
||||
fn(rw, r)
|
||||
}
|
||||
}
|
||||
|
|
@ -110,7 +131,7 @@ func (c *controller) auth(
|
|||
// render sets the headers for the response. It applies the given template "tmpl" to
|
||||
// the given object "arg" and writes the output to http.ResponseWriter.
|
||||
// It logs a warning in case of error.
|
||||
func (c *controller) render(rw http.ResponseWriter, tmpl string, arg interface{}) {
|
||||
func (c *controller) render(rw http.ResponseWriter, tmpl string, arg any) {
|
||||
rw.Header().Set("Content-type", "text/html; charset=utf-8")
|
||||
rw.Header().Set("X-Content-Type-Options", "nosniff")
|
||||
if err := c.tmpl.ExecuteTemplate(rw, tmpl, arg); err != nil {
|
||||
|
|
@ -121,13 +142,13 @@ func (c *controller) render(rw http.ResponseWriter, tmpl string, arg interface{}
|
|||
// failed constructs the error messages by calling "asMultiError" and calls "render"
|
||||
// function to render the passed template and error object.
|
||||
func (c *controller) failed(rw http.ResponseWriter, tmpl string, err error) {
|
||||
result := map[string]interface{}{"Error": asMultiError(err)}
|
||||
result := map[string]any{"Error": asMultiError(err)}
|
||||
c.render(rw, tmpl, result)
|
||||
}
|
||||
|
||||
// index calls the "render" function and passes the "index.html" and c.cfg to it.
|
||||
func (c *controller) index(rw http.ResponseWriter, r *http.Request) {
|
||||
c.render(rw, "index.html", map[string]interface{}{
|
||||
func (c *controller) index(rw http.ResponseWriter, _ *http.Request) {
|
||||
c.render(rw, "index.html", map[string]any{
|
||||
"Config": c.cfg,
|
||||
})
|
||||
}
|
||||
|
|
@ -137,7 +158,7 @@ func (c *controller) index(rw http.ResponseWriter, r *http.Request) {
|
|||
// in case of no error occurred, otherwise calls the "failed" function and passes the given
|
||||
// template and the error from "fn".
|
||||
func (c *controller) web(
|
||||
fn func(*http.Request) (interface{}, error),
|
||||
fn func(*http.Request) (any, error),
|
||||
tmpl string,
|
||||
) func(http.ResponseWriter, *http.Request) {
|
||||
|
||||
|
|
@ -152,8 +173,8 @@ func (c *controller) web(
|
|||
|
||||
// writeJSON sets the header for the response and writes the JSON encoding of the given "content".
|
||||
// It logs out an error message in case of an error.
|
||||
func writeJSON(rw http.ResponseWriter, content interface{}, code int) {
|
||||
rw.Header().Set("Content-type", "application/json; charset=utf-8")
|
||||
func writeJSON(rw http.ResponseWriter, content any, code int) {
|
||||
rw.Header().Set("Content-type", "application/json")
|
||||
rw.Header().Set("X-Content-Type-Options", "nosniff")
|
||||
rw.WriteHeader(code)
|
||||
if err := json.NewEncoder(rw).Encode(content); err != nil {
|
||||
|
|
@ -161,7 +182,7 @@ func writeJSON(rw http.ResponseWriter, content interface{}, code int) {
|
|||
}
|
||||
}
|
||||
|
||||
func errorToContent(err error) interface{} {
|
||||
func errorToContent(err error) any {
|
||||
return &struct {
|
||||
Errors multiError `json:"errors"`
|
||||
}{
|
||||
|
|
@ -170,7 +191,7 @@ func errorToContent(err error) interface{} {
|
|||
}
|
||||
|
||||
func api(
|
||||
fn func(*http.Request) (interface{}, error),
|
||||
fn func(*http.Request) (any, error),
|
||||
) func(http.ResponseWriter, *http.Request) {
|
||||
|
||||
return func(rw http.ResponseWriter, r *http.Request) {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -14,13 +14,16 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// ensureFolders initializes the paths and call functions to create
|
||||
|
|
@ -33,6 +36,7 @@ func ensureFolders(c *config) error {
|
|||
for _, create := range []func(*config, string) error{
|
||||
createWellknown,
|
||||
createFeedFolders,
|
||||
createService,
|
||||
createOpenPGPFolder,
|
||||
createProviderMetadata,
|
||||
} {
|
||||
|
|
@ -41,7 +45,13 @@ func ensureFolders(c *config) error {
|
|||
}
|
||||
}
|
||||
|
||||
return setupSecurity(c, wellknown)
|
||||
// Only write/modify security.txt if configured.
|
||||
if c.WriteSecurity {
|
||||
if err := setupSecurity(c, wellknown); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// createWellknown creates ".well-known" directory if not exist and returns nil.
|
||||
|
|
@ -60,11 +70,66 @@ func createWellknown(_ *config, wellknown string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// createService creates the ROLIE service document (if configured).
|
||||
func createService(c *config, wellknownCSAF string) error {
|
||||
// no service document needed.
|
||||
if !c.ServiceDocument {
|
||||
return nil
|
||||
}
|
||||
|
||||
categories := csaf.ROLIEServiceWorkspaceCollectionCategories{
|
||||
Category: []csaf.ROLIEServiceWorkspaceCollectionCategoriesCategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
}
|
||||
|
||||
var collections []csaf.ROLIEServiceWorkspaceCollection
|
||||
|
||||
for _, t := range c.TLPs {
|
||||
if t == tlpCSAF {
|
||||
continue
|
||||
}
|
||||
ts := string(t)
|
||||
feedName := "csaf-feed-tlp-" + ts + ".json"
|
||||
href := c.CanonicalURLPrefix +
|
||||
"/.well-known/csaf/" + ts + "/" + feedName
|
||||
|
||||
collection := csaf.ROLIEServiceWorkspaceCollection{
|
||||
Title: "CSAF feed (TLP:" + strings.ToUpper(ts) + ")",
|
||||
HRef: href,
|
||||
Categories: categories,
|
||||
}
|
||||
collections = append(collections, collection)
|
||||
}
|
||||
|
||||
rsd := &csaf.ROLIEServiceDocument{
|
||||
Service: csaf.ROLIEService{
|
||||
Workspace: []csaf.ROLIEServiceWorkspace{{
|
||||
Title: "CSAF feeds",
|
||||
Collection: collections,
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
path := filepath.Join(wellknownCSAF, "service.json")
|
||||
return util.WriteToFile(path, rsd)
|
||||
}
|
||||
|
||||
// createFeedFolders creates the feed folders according to the tlp values
|
||||
// in the "tlps" config option if they do not already exist.
|
||||
// No creation for the "csaf" option will be done.
|
||||
// It creates also symbolic links to feed folders.
|
||||
func createFeedFolders(c *config, wellknown string) error {
|
||||
|
||||
// If we have static configured categories we need to create
|
||||
// the category documents.
|
||||
var catDoc *csaf.ROLIECategoryDocument
|
||||
|
||||
if categories := c.StaticCategories(); len(categories) > 0 {
|
||||
catDoc = csaf.NewROLIECategoryDocument(categories...)
|
||||
}
|
||||
|
||||
for _, t := range c.TLPs {
|
||||
if t == tlpCSAF {
|
||||
continue
|
||||
|
|
@ -83,10 +148,63 @@ func createFeedFolders(c *config, wellknown string) error {
|
|||
return err
|
||||
}
|
||||
}
|
||||
// Store the category document.
|
||||
if catDoc != nil {
|
||||
catPath := path.Join(tlpLink, "category-"+string(t)+".json")
|
||||
if err := util.WriteToFile(catPath, catDoc); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// Create an empty ROLIE feed document
|
||||
if err := createROLIEfeed(c, t, tlpLink); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// createROLIEfeed creates an empty ROLIE feed
|
||||
func createROLIEfeed(c *config, t tlp, folder string) error {
|
||||
ts := string(t)
|
||||
feedName := "csaf-feed-tlp-" + ts + ".json"
|
||||
|
||||
feed := filepath.Join(folder, feedName)
|
||||
|
||||
feedURL := csaf.JSONURL(
|
||||
c.CanonicalURLPrefix +
|
||||
"/.well-known/csaf/" + ts + "/" + feedName)
|
||||
|
||||
tlpLabel := csaf.TLPLabel(strings.ToUpper(ts))
|
||||
|
||||
links := []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: string(feedURL),
|
||||
}}
|
||||
// If we have a service document we need to link it.
|
||||
if c.ServiceDocument {
|
||||
links = append(links, csaf.Link{
|
||||
Rel: "service",
|
||||
HRef: c.CanonicalURLPrefix + "/.well-known/csaf/service.json",
|
||||
})
|
||||
}
|
||||
rolie := &csaf.ROLIEFeed{
|
||||
Feed: csaf.FeedData{
|
||||
ID: "csaf-feed-tlp-" + ts,
|
||||
Title: "CSAF feed (TLP:" + string(tlpLabel) + ")",
|
||||
Link: links,
|
||||
Category: []csaf.ROLIECategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
Updated: csaf.TimeStamp(time.Now().UTC()),
|
||||
Entry: []*csaf.Entry{},
|
||||
},
|
||||
}
|
||||
|
||||
return util.WriteToFile(feed, rolie)
|
||||
|
||||
}
|
||||
|
||||
// createOpenPGPFolder creates an openpgp folder besides
|
||||
// the provider-metadata.json in the csaf folder.
|
||||
func createOpenPGPFolder(c *config, wellknown string) error {
|
||||
|
|
@ -228,6 +346,17 @@ func createProviderMetadata(c *config, wellknownCSAF string) error {
|
|||
pm := csaf.NewProviderMetadataDomain(c.CanonicalURLPrefix, c.modelTLPs())
|
||||
c.ProviderMetaData.apply(pm)
|
||||
|
||||
// We have directory based distributions.
|
||||
if c.WriteIndices {
|
||||
// Every TLP as a distribution?
|
||||
for _, t := range c.TLPs {
|
||||
if t != tlpCSAF {
|
||||
pm.AddDirectoryDistribution(
|
||||
c.CanonicalURLPrefix + "/.well-known/csaf/" + string(t))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
key, err := loadCryptoKeyFromFile(c.OpenPGPPublicKey)
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot load public key: %v", err)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -13,7 +13,7 @@ import (
|
|||
"crypto/sha512"
|
||||
"os"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func writeHashedFile(fname, name string, data []byte, armored string) error {
|
||||
|
|
@ -30,8 +30,5 @@ func writeHashedFile(fname, name string, data []byte, armored string) error {
|
|||
return err
|
||||
}
|
||||
// Write signature.
|
||||
if err := os.WriteFile(fname+".asc", []byte(armored), 0644); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
return os.WriteFile(fname+".asc", []byte(armored), 0644)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -17,6 +17,8 @@ import (
|
|||
"path/filepath"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func updateIndex(dir, fname string) error {
|
||||
|
|
@ -74,6 +76,11 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
|
|||
path string
|
||||
}
|
||||
|
||||
const (
|
||||
pathColumn = 0
|
||||
timeColumn = 1
|
||||
)
|
||||
|
||||
changes := filepath.Join(dir, "changes.csv")
|
||||
|
||||
chs, err := func() ([]change, error) {
|
||||
|
|
@ -99,9 +106,9 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
|
|||
return nil, err
|
||||
}
|
||||
// Check if new is already in.
|
||||
if record[1] == fname {
|
||||
if record[pathColumn] == fname {
|
||||
// Identical -> no change at all.
|
||||
if record[0] == releaseDate.Format(dateFormat) {
|
||||
if record[timeColumn] == releaseDate.Format(dateFormat) {
|
||||
return nil, nil
|
||||
}
|
||||
// replace old entry
|
||||
|
|
@ -109,11 +116,11 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
|
|||
chs = append(chs, change{releaseDate, fname})
|
||||
continue
|
||||
}
|
||||
t, err := time.Parse(dateFormat, record[0])
|
||||
t, err := time.Parse(dateFormat, record[timeColumn])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
chs = append(chs, change{t, record[1]})
|
||||
chs = append(chs, change{t, record[pathColumn]})
|
||||
}
|
||||
if !replaced {
|
||||
chs = append(chs, change{releaseDate, fname})
|
||||
|
|
@ -136,11 +143,11 @@ func updateChanges(dir, fname string, releaseDate time.Time) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c := csv.NewWriter(o)
|
||||
c := util.NewFullyQuotedCSWWriter(o)
|
||||
record := make([]string, 2)
|
||||
for _, ch := range chs {
|
||||
record[0] = ch.time.Format(dateFormat)
|
||||
record[1] = ch.path
|
||||
record[timeColumn] = ch.time.Format(dateFormat)
|
||||
record[pathColumn] = ch.path
|
||||
if err := c.Write(record); err != nil {
|
||||
o.Close()
|
||||
return err
|
||||
|
|
|
|||
|
|
@ -1,11 +1,12 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
||||
// Package main implements the csaf_provider.
|
||||
package main
|
||||
|
||||
import (
|
||||
|
|
@ -13,15 +14,27 @@ import (
|
|||
"log"
|
||||
"net/http"
|
||||
"net/http/cgi"
|
||||
"os"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/jessevdk/go-flags"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
type options struct {
|
||||
Version bool `long:"version" description:"Display version of the binary"`
|
||||
}
|
||||
|
||||
const cgiRequired = "The csaf_provider is a cgi binary and is designed to be served via a web server."
|
||||
|
||||
func ensureCGI() {
|
||||
if _, ok := os.LookupEnv("REQUEST_METHOD"); !ok {
|
||||
fmt.Println(cgiRequired)
|
||||
fmt.Println("Version: " + util.SemVersion)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
var opts options
|
||||
parser := flags.NewParser(&opts, flags.Default)
|
||||
|
|
@ -31,10 +44,13 @@ func main() {
|
|||
return
|
||||
}
|
||||
|
||||
ensureCGI()
|
||||
|
||||
cfg, err := loadConfig()
|
||||
if err != nil {
|
||||
cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
http.Error(rw, fmt.Sprintf("Config error: %v\n", err), http.StatusInternalServerError)
|
||||
cgi.Serve(http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
|
||||
http.Error(rw, "Something went wrong. Check server logs for more details",
|
||||
http.StatusInternalServerError)
|
||||
}))
|
||||
log.Fatalf("error: %v\n", err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
|
|||
173
cmd/csaf_provider/rolie.go
Normal file
173
cmd/csaf_provider/rolie.go
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// mergeCategories merges the given categories into the old ones.
|
||||
func (c *controller) mergeCategories(
|
||||
folder string,
|
||||
t tlp,
|
||||
categories []string,
|
||||
) error {
|
||||
ts := string(t)
|
||||
catName := "category-" + ts + ".json"
|
||||
catPath := filepath.Join(folder, catName)
|
||||
|
||||
catDoc, err := loadCategoryDocument(catPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var changed bool
|
||||
|
||||
if catDoc == nil {
|
||||
catDoc = csaf.NewROLIECategoryDocument(categories...)
|
||||
changed = true
|
||||
} else {
|
||||
changed = catDoc.Merge(categories...)
|
||||
}
|
||||
|
||||
if changed {
|
||||
if err := util.WriteToFile(catPath, catDoc); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadROLIEFeed loads a ROLIE feed from file if its exists.
|
||||
// Returns nil if the file does not exists.
|
||||
func loadCategoryDocument(path string) (*csaf.ROLIECategoryDocument, error) {
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return csaf.LoadROLIECategoryDocument(f)
|
||||
}
|
||||
|
||||
// extendROLIE adds a new entry to the ROLIE feed for a given advisory.
|
||||
func (c *controller) extendROLIE(
|
||||
folder string,
|
||||
newCSAF string,
|
||||
t tlp,
|
||||
ex *csaf.AdvisorySummary,
|
||||
) error {
|
||||
// Load the feed
|
||||
ts := string(t)
|
||||
feedName := "csaf-feed-tlp-" + ts + ".json"
|
||||
|
||||
feed := filepath.Join(folder, feedName)
|
||||
rolie, err := loadROLIEFeed(feed)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
feedURL := csaf.JSONURL(
|
||||
c.cfg.CanonicalURLPrefix +
|
||||
"/.well-known/csaf/" + ts + "/" + feedName)
|
||||
|
||||
tlpLabel := csaf.TLPLabel(strings.ToUpper(ts))
|
||||
|
||||
// Create new if does not exists.
|
||||
if rolie == nil {
|
||||
links := []csaf.Link{{
|
||||
Rel: "self",
|
||||
HRef: string(feedURL),
|
||||
}}
|
||||
// If we have a service document we need to link it.
|
||||
if c.cfg.ServiceDocument {
|
||||
links = append(links, csaf.Link{
|
||||
Rel: "service",
|
||||
HRef: c.cfg.CanonicalURLPrefix + "/.well-known/csaf/service.json",
|
||||
})
|
||||
}
|
||||
rolie = &csaf.ROLIEFeed{
|
||||
Feed: csaf.FeedData{
|
||||
ID: "csaf-feed-tlp-" + ts,
|
||||
Title: "CSAF feed (TLP:" + string(tlpLabel) + ")",
|
||||
Link: links,
|
||||
Category: []csaf.ROLIECategory{{
|
||||
Scheme: "urn:ietf:params:rolie:category:information-type",
|
||||
Term: "csaf",
|
||||
}},
|
||||
Entry: []*csaf.Entry{},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
rolie.Feed.Updated = csaf.TimeStamp(time.Now().UTC())
|
||||
|
||||
year := strconv.Itoa(ex.InitialReleaseDate.Year())
|
||||
|
||||
csafURL := c.cfg.CanonicalURLPrefix +
|
||||
"/.well-known/csaf/" + ts + "/" + year + "/" + newCSAF
|
||||
|
||||
e := rolie.EntryByID(ex.ID)
|
||||
if e == nil {
|
||||
e = &csaf.Entry{ID: ex.ID}
|
||||
rolie.Feed.Entry = append(rolie.Feed.Entry, e)
|
||||
}
|
||||
|
||||
e.Titel = ex.Title
|
||||
e.Published = csaf.TimeStamp(ex.InitialReleaseDate)
|
||||
e.Updated = csaf.TimeStamp(ex.CurrentReleaseDate)
|
||||
e.Link = []csaf.Link{
|
||||
{Rel: "self", HRef: csafURL},
|
||||
{Rel: "hash", HRef: csafURL + ".sha256"},
|
||||
{Rel: "hash", HRef: csafURL + ".sha512"},
|
||||
{Rel: "signature", HRef: csafURL + ".asc"},
|
||||
}
|
||||
e.Format = csaf.Format{
|
||||
Schema: "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json",
|
||||
Version: "2.0",
|
||||
}
|
||||
e.Content = csaf.Content{
|
||||
Type: "application/json",
|
||||
Src: csafURL,
|
||||
}
|
||||
if ex.Summary != "" {
|
||||
e.Summary = &csaf.Summary{Content: ex.Summary}
|
||||
} else {
|
||||
e.Summary = nil
|
||||
}
|
||||
|
||||
// Sort by descending updated order.
|
||||
rolie.SortEntriesByUpdated()
|
||||
|
||||
// Store the feed
|
||||
return util.WriteToFile(feed, rolie)
|
||||
}
|
||||
|
||||
// loadROLIEFeed loads a ROLIE feed from file if its exists.
|
||||
// Returns nil if the file does not exists.
|
||||
func loadROLIEFeed(feed string) (*csaf.ROLIEFeed, error) {
|
||||
f, err := os.Open(feed)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return csaf.LoadROLIEFeed(f)
|
||||
}
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
<!--
|
||||
This file is Free Software under the MIT License
|
||||
without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
This file is Free Software under the Apache-2.0 License
|
||||
without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
|
||||
SPDX-License-Identifier: MIT
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
<!--
|
||||
This file is Free Software under the MIT License
|
||||
without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
This file is Free Software under the Apache-2.0 License
|
||||
without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
|
||||
SPDX-License-Identifier: MIT
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
<!--
|
||||
This file is Free Software under the MIT License
|
||||
without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
This file is Free Software under the Apache-2.0 License
|
||||
without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
|
||||
SPDX-License-Identifier: MIT
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -12,8 +12,8 @@ import (
|
|||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
func doTransaction(
|
||||
|
|
|
|||
191
cmd/csaf_uploader/config.go
Normal file
191
cmd/csaf_uploader/config.go
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
"golang.org/x/term"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/certs"
|
||||
"github.com/gocsaf/csaf/v3/internal/options"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultURL = "https://localhost/cgi-bin/csaf_provider.go"
|
||||
defaultAction = "upload"
|
||||
defaultTLP = "csaf"
|
||||
)
|
||||
|
||||
// The supported flag config of the uploader command line
|
||||
type config struct {
|
||||
//lint:ignore SA5008 We are using choice twice: upload, create.
|
||||
Action string `short:"a" long:"action" choice:"upload" choice:"create" description:"Action to perform" toml:"action"`
|
||||
URL string `short:"u" long:"url" description:"URL of the CSAF provider" value-name:"URL" toml:"url"`
|
||||
//lint:ignore SA5008 We are using choice many times: csaf, white, green, amber, red.
|
||||
TLP string `short:"t" long:"tlp" choice:"csaf" choice:"white" choice:"green" choice:"amber" choice:"red" description:"TLP of the feed" toml:"tlp"`
|
||||
ExternalSigned bool `short:"x" long:"external_signed" description:"CSAF files are signed externally. Assumes .asc files beside CSAF files." toml:"external_signed"`
|
||||
NoSchemaCheck bool `short:"s" long:"no_schema_check" description:"Do not check files against CSAF JSON schema locally." toml:"no_schema_check"`
|
||||
|
||||
Key *string `short:"k" long:"key" description:"OpenPGP key to sign the CSAF files" value-name:"KEY-FILE" toml:"key"`
|
||||
Password *string `short:"p" long:"password" description:"Authentication password for accessing the CSAF provider" value-name:"PASSWORD" toml:"password"`
|
||||
Passphrase *string `short:"P" long:"passphrase" description:"Passphrase to unlock the OpenPGP key" value-name:"PASSPHRASE" toml:"passphrase"`
|
||||
ClientCert *string `long:"client_cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE.crt" toml:"client_cert"`
|
||||
ClientKey *string `long:"client_key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE.pem" toml:"client_key"`
|
||||
ClientPassphrase *string `long:"client_passphrase" description:"Optional passphrase for the client cert (limited, experimental, see downloader doc)" value-name:"PASSPHRASE" toml:"client_passphrase"`
|
||||
|
||||
PasswordInteractive bool `short:"i" long:"password_interactive" description:"Enter password interactively" toml:"password_interactive"`
|
||||
PassphraseInteractive bool `short:"I" long:"passphrase_interactive" description:"Enter OpenPGP key passphrase interactively" toml:"passphrase_interactive"`
|
||||
|
||||
Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider" toml:"insecure"`
|
||||
|
||||
Config string `short:"c" long:"config" description:"Path to config TOML file" value-name:"TOML-FILE" toml:"-"`
|
||||
Version bool `long:"version" description:"Display version of the binary" toml:"-"`
|
||||
|
||||
clientCerts []tls.Certificate
|
||||
cachedAuth string
|
||||
keyRing *crypto.KeyRing
|
||||
}
|
||||
|
||||
// iniPaths are the potential file locations of the the config file.
|
||||
var configPaths = []string{
|
||||
"~/.config/csaf/uploader.toml",
|
||||
"~/.csaf_uploader.toml",
|
||||
"csaf_uploader.toml",
|
||||
}
|
||||
|
||||
// parseArgsConfig parses the command line and if need a config file.
|
||||
func parseArgsConfig() ([]string, *config, error) {
|
||||
p := options.Parser[config]{
|
||||
DefaultConfigLocations: configPaths,
|
||||
ConfigLocation: func(cfg *config) string { return cfg.Config },
|
||||
Usage: "[OPTIONS] advisories...",
|
||||
HasVersion: func(cfg *config) bool { return cfg.Version },
|
||||
SetDefaults: func(cfg *config) {
|
||||
cfg.URL = defaultURL
|
||||
cfg.Action = defaultAction
|
||||
cfg.TLP = defaultTLP
|
||||
},
|
||||
// Re-establish default values if not set.
|
||||
EnsureDefaults: func(cfg *config) {
|
||||
if cfg.URL == "" {
|
||||
cfg.URL = defaultURL
|
||||
}
|
||||
if cfg.Action == "" {
|
||||
cfg.Action = defaultAction
|
||||
}
|
||||
if cfg.TLP == "" {
|
||||
cfg.TLP = defaultTLP
|
||||
}
|
||||
},
|
||||
}
|
||||
return p.Parse()
|
||||
}
|
||||
|
||||
// prepareCertificates loads the client side certificates used by the HTTP client.
|
||||
func (cfg *config) prepareCertificates() error {
|
||||
cert, err := certs.LoadCertificate(
|
||||
cfg.ClientCert, cfg.ClientKey, cfg.ClientPassphrase)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.clientCerts = cert
|
||||
return nil
|
||||
}
|
||||
|
||||
// readInteractive prints a message to command line and retrieves the password from it.
|
||||
func readInteractive(prompt string, pw **string) error {
|
||||
fmt.Print(prompt)
|
||||
p, err := term.ReadPassword(int(os.Stdin.Fd()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ps := string(p)
|
||||
*pw = &ps
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepareInteractive prompts for interactive passwords.
|
||||
func (cfg *config) prepareInteractive() error {
|
||||
if cfg.PasswordInteractive {
|
||||
if err := readInteractive("Enter auth password: ", &cfg.Password); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if cfg.PassphraseInteractive {
|
||||
if err := readInteractive("Enter OpenPGP passphrase: ", &cfg.Passphrase); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadOpenPGPKey loads an OpenPGP key.
|
||||
func loadOpenPGPKey(filename string) (*crypto.Key, error) {
|
||||
f, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return crypto.NewKeyFromArmoredReader(f)
|
||||
}
|
||||
|
||||
// prepareOpenPGPKey loads the configured OpenPGP key.
|
||||
func (cfg *config) prepareOpenPGPKey() error {
|
||||
if cfg.Action != "upload" || cfg.Key == nil {
|
||||
return nil
|
||||
}
|
||||
if cfg.ExternalSigned {
|
||||
return errors.New("refused to sign external signed files")
|
||||
}
|
||||
key, err := loadOpenPGPKey(*cfg.Key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if cfg.Passphrase != nil {
|
||||
if key, err = key.Unlock([]byte(*cfg.Passphrase)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
cfg.keyRing, err = crypto.NewKeyRing(key)
|
||||
return err
|
||||
}
|
||||
|
||||
// preparePassword pre-calculates the auth header.
|
||||
func (cfg *config) preparePassword() error {
|
||||
if cfg.Password != nil {
|
||||
hash, err := bcrypt.GenerateFromPassword(
|
||||
[]byte(*cfg.Password), bcrypt.DefaultCost)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.cachedAuth = string(hash)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepare prepares internal state of a loaded configuration.
|
||||
func (cfg *config) prepare() error {
|
||||
for _, prepare := range []func(*config) error{
|
||||
(*config).prepareCertificates,
|
||||
(*config).prepareInteractive,
|
||||
(*config).prepareOpenPGPKey,
|
||||
(*config).preparePassword,
|
||||
} {
|
||||
if err := prepare(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -9,407 +9,12 @@
|
|||
// Implements a command line tool that uploads csaf documents to csaf_provider.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
"github.com/csaf-poc/csaf_distribution/csaf"
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/jessevdk/go-flags"
|
||||
"github.com/mitchellh/go-homedir"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
"golang.org/x/term"
|
||||
)
|
||||
|
||||
// The supported flag options of the uploader command line
|
||||
type options struct {
|
||||
Action string `short:"a" long:"action" choice:"upload" choice:"create" default:"upload" description:"Action to perform"`
|
||||
URL string `short:"u" long:"url" description:"URL of the CSAF provider" default:"https://localhost/cgi-bin/csaf_provider.go" value-name:"URL"`
|
||||
TLP string `short:"t" long:"tlp" choice:"csaf" choice:"white" choice:"green" choice:"amber" choice:"red" default:"csaf" description:"TLP of the feed"`
|
||||
ExternalSigned bool `short:"x" long:"external-signed" description:"CSAF files are signed externally. Assumes .asc files beside CSAF files."`
|
||||
NoSchemaCheck bool `short:"s" long:"no-schema-check" description:"Do not check files against CSAF JSON schema locally."`
|
||||
|
||||
Key *string `short:"k" long:"key" description:"OpenPGP key to sign the CSAF files" value-name:"KEY-FILE"`
|
||||
Password *string `short:"p" long:"password" description:"Authentication password for accessing the CSAF provider" value-name:"PASSWORD"`
|
||||
Passphrase *string `short:"P" long:"passphrase" description:"Passphrase to unlock the OpenPGP key" value-name:"PASSPHRASE"`
|
||||
ClientCert *string `long:"client-cert" description:"TLS client certificate file (PEM encoded data)" value-name:"CERT-FILE.crt"`
|
||||
ClientKey *string `long:"client-key" description:"TLS client private key file (PEM encoded data)" value-name:"KEY-FILE.pem"`
|
||||
|
||||
PasswordInteractive bool `short:"i" long:"password-interactive" description:"Enter password interactively" no-ini:"true"`
|
||||
PassphraseInteractive bool `short:"I" long:"passphrase-interactive" description:"Enter OpenPGP key passphrase interactively" no-ini:"true"`
|
||||
|
||||
Insecure bool `long:"insecure" description:"Do not check TLS certificates from provider"`
|
||||
|
||||
Config *string `short:"c" long:"config" description:"Path to config ini file" value-name:"INI-FILE" no-ini:"true"`
|
||||
Version bool `long:"version" description:"Display version of the binary"`
|
||||
}
|
||||
|
||||
type processor struct {
|
||||
opts *options
|
||||
cachedAuth string
|
||||
keyRing *crypto.KeyRing
|
||||
}
|
||||
|
||||
// iniPaths are the potential file locations of the the config file.
|
||||
var iniPaths = []string{
|
||||
"~/.config/csaf/uploader.ini",
|
||||
"~/.csaf_uploader.ini",
|
||||
"csaf_uploader.ini",
|
||||
}
|
||||
|
||||
// loadKey loads an OpenPGP key.
|
||||
func loadKey(filename string) (*crypto.Key, error) {
|
||||
f, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return crypto.NewKeyFromArmoredReader(f)
|
||||
}
|
||||
|
||||
func newProcessor(opts *options) (*processor, error) {
|
||||
p := processor{
|
||||
opts: opts,
|
||||
}
|
||||
|
||||
if opts.Action == "upload" {
|
||||
if opts.Key != nil {
|
||||
if opts.ExternalSigned {
|
||||
return nil, errors.New("refused to sign external signed files")
|
||||
}
|
||||
var err error
|
||||
var key *crypto.Key
|
||||
if key, err = loadKey(*opts.Key); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if opts.Passphrase != nil {
|
||||
if key, err = key.Unlock([]byte(*opts.Passphrase)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if p.keyRing, err = crypto.NewKeyRing(key); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// pre-calc the auth header
|
||||
if opts.Password != nil {
|
||||
hash, err := bcrypt.GenerateFromPassword(
|
||||
[]byte(*opts.Password), bcrypt.DefaultCost)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p.cachedAuth = string(hash)
|
||||
}
|
||||
|
||||
return &p, nil
|
||||
}
|
||||
|
||||
// httpClient initializes the http.Client according to the "Insecure" flag
|
||||
// and the TLS client files for authentication and returns it.
|
||||
func (p *processor) httpClient() *http.Client {
|
||||
var client http.Client
|
||||
var tlsConfig tls.Config
|
||||
|
||||
if p.opts.Insecure {
|
||||
tlsConfig.InsecureSkipVerify = true
|
||||
}
|
||||
|
||||
if p.opts.ClientCert != nil && p.opts.ClientKey != nil {
|
||||
cert, err := tls.LoadX509KeyPair(*p.opts.ClientCert, *p.opts.ClientKey)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
tlsConfig.Certificates = []tls.Certificate{cert}
|
||||
}
|
||||
|
||||
client.Transport = &http.Transport{
|
||||
TLSClientConfig: &tlsConfig,
|
||||
}
|
||||
|
||||
return &client
|
||||
}
|
||||
|
||||
// writeStrings prints the passed messages under the specific passed header.
|
||||
func writeStrings(header string, messages []string) {
|
||||
if len(messages) > 0 {
|
||||
fmt.Println(header)
|
||||
for _, msg := range messages {
|
||||
fmt.Printf("\t%s\n", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// create sends an request to create the initial files and directories
|
||||
// on the server. It prints the response messages.
|
||||
func (p *processor) create() error {
|
||||
req, err := http.NewRequest(http.MethodGet, p.opts.URL+"/api/create", nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Set("X-CSAF-PROVIDER-AUTH", p.cachedAuth)
|
||||
|
||||
resp, err := p.httpClient().Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
log.Printf("Create failed: %s\n", resp.Status)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Message string `json:"message"`
|
||||
Errors []string `json:"errors"`
|
||||
}
|
||||
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if result.Message != "" {
|
||||
fmt.Printf("\t%s\n", result.Message)
|
||||
}
|
||||
|
||||
writeStrings("Errors:", result.Errors)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// uploadRequest creates the request for uploading a csaf document by passing the filename.
|
||||
// According to the flags values the multipart sections of the request are established.
|
||||
// It returns the created http request.
|
||||
func (p *processor) uploadRequest(filename string) (*http.Request, error) {
|
||||
data, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !p.opts.NoSchemaCheck {
|
||||
var doc interface{}
|
||||
if err := json.NewDecoder(bytes.NewReader(data)).Decode(&doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
errs, err := csaf.ValidateCSAF(doc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(errs) > 0 {
|
||||
writeStrings("Errors:", errs)
|
||||
return nil, errors.New("local schema check failed")
|
||||
}
|
||||
}
|
||||
|
||||
body := new(bytes.Buffer)
|
||||
writer := multipart.NewWriter(body)
|
||||
|
||||
part, err := writer.CreateFormFile("csaf", filepath.Base(filename))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := part.Write(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := writer.WriteField("tlp", p.opts.TLP); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if p.keyRing == nil && p.opts.Passphrase != nil {
|
||||
if err := writer.WriteField("passphrase", *p.opts.Passphrase); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if p.keyRing != nil {
|
||||
sig, err := p.keyRing.SignDetached(crypto.NewPlainMessage(data))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
armored, err := sig.GetArmored()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := writer.WriteField("signature", armored); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if p.opts.ExternalSigned {
|
||||
signature, err := os.ReadFile(filename + ".asc")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := writer.WriteField("signature", string(signature)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if err := writer.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, p.opts.URL+"/api/upload", body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req.Header.Set("X-CSAF-PROVIDER-AUTH", p.cachedAuth)
|
||||
req.Header.Set("Content-Type", writer.FormDataContentType())
|
||||
|
||||
return req, nil
|
||||
}
|
||||
|
||||
// process attemps to upload a file to the server.
|
||||
// It prints the response messages.
|
||||
func (p *processor) process(filename string) error {
|
||||
|
||||
if bn := filepath.Base(filename); !util.ConfirmingFileName(bn) {
|
||||
return fmt.Errorf("%q is not a confirming file name", bn)
|
||||
}
|
||||
|
||||
req, err := p.uploadRequest(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resp, err := p.httpClient().Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var uploadErr error
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
uploadErr = fmt.Errorf("upload failed: %s", resp.Status)
|
||||
fmt.Printf("HTTPS %s\n", uploadErr)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Name string `json:"name"`
|
||||
ReleaseDate string `json:"release_date"`
|
||||
Warnings []string `json:"warnings"`
|
||||
Errors []string `json:"errors"`
|
||||
}
|
||||
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if result.Name != "" {
|
||||
fmt.Printf("Name: %s\n", result.Name)
|
||||
}
|
||||
if result.ReleaseDate != "" {
|
||||
fmt.Printf("Release date: %s\n", result.ReleaseDate)
|
||||
}
|
||||
|
||||
writeStrings("Warnings:", result.Warnings)
|
||||
writeStrings("Errors:", result.Errors)
|
||||
|
||||
return uploadErr
|
||||
}
|
||||
|
||||
// findIniFile looks for a file in the pre-defined paths in "iniPaths".
|
||||
// The returned value will be the name of file if found, otherwise an empty string.
|
||||
func findIniFile() string {
|
||||
for _, f := range iniPaths {
|
||||
name, err := homedir.Expand(f)
|
||||
if err != nil {
|
||||
log.Printf("warn: %v\n", err)
|
||||
continue
|
||||
}
|
||||
if _, err := os.Stat(name); err == nil {
|
||||
return name
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// readInteractive prints a message to command line and retrieves the password from it.
|
||||
func readInteractive(prompt string, pw **string) error {
|
||||
fmt.Print(prompt)
|
||||
p, err := term.ReadPassword(int(os.Stdin.Fd()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ps := string(p)
|
||||
*pw = &ps
|
||||
return nil
|
||||
}
|
||||
|
||||
func check(err error) {
|
||||
if err != nil {
|
||||
if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp {
|
||||
os.Exit(0)
|
||||
}
|
||||
log.Fatalf("error: %v\n", err)
|
||||
}
|
||||
}
|
||||
import "github.com/gocsaf/csaf/v3/internal/options"
|
||||
|
||||
func main() {
|
||||
var opts options
|
||||
|
||||
parser := flags.NewParser(&opts, flags.Default)
|
||||
|
||||
args, err := parser.Parse()
|
||||
check(err)
|
||||
|
||||
if opts.Version {
|
||||
fmt.Println(util.SemVersion)
|
||||
return
|
||||
}
|
||||
|
||||
if opts.Config != nil {
|
||||
iniParser := flags.NewIniParser(parser)
|
||||
iniParser.ParseAsDefaults = true
|
||||
name, err := homedir.Expand(*opts.Config)
|
||||
check(err)
|
||||
check(iniParser.ParseFile(name))
|
||||
} else if iniFile := findIniFile(); iniFile != "" {
|
||||
iniParser := flags.NewIniParser(parser)
|
||||
iniParser.ParseAsDefaults = true
|
||||
check(iniParser.ParseFile(iniFile))
|
||||
}
|
||||
|
||||
if opts.PasswordInteractive {
|
||||
check(readInteractive("Enter auth password: ", &opts.Password))
|
||||
}
|
||||
|
||||
if opts.PassphraseInteractive {
|
||||
check(readInteractive("Enter OpenPGP passphrase: ", &opts.Passphrase))
|
||||
}
|
||||
|
||||
if opts.ClientCert != nil && opts.ClientKey == nil || opts.ClientCert == nil && opts.ClientKey != nil {
|
||||
log.Println("Both client-key and client-cert options must be set for the authentication.")
|
||||
return
|
||||
}
|
||||
|
||||
p, err := newProcessor(&opts)
|
||||
check(err)
|
||||
|
||||
if opts.Action == "create" {
|
||||
check(p.create())
|
||||
return
|
||||
}
|
||||
|
||||
if len(args) == 0 {
|
||||
log.Println("No CSAF files given.")
|
||||
}
|
||||
|
||||
for _, arg := range args {
|
||||
check(p.process(arg))
|
||||
}
|
||||
args, cfg, err := parseArgsConfig()
|
||||
options.ErrorCheck(err)
|
||||
options.ErrorCheck(cfg.prepare())
|
||||
p := &processor{cfg: cfg}
|
||||
options.ErrorCheck(p.run(args))
|
||||
}
|
||||
|
|
|
|||
278
cmd/csaf_uploader/processor.go
Normal file
278
cmd/csaf_uploader/processor.go
Normal file
|
|
@ -0,0 +1,278 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022, 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022, 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/ProtonMail/gopenpgp/v2/armor"
|
||||
"github.com/ProtonMail/gopenpgp/v2/constants"
|
||||
"github.com/ProtonMail/gopenpgp/v2/crypto"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// processor carries the runtime state of the uploader. It only holds
// the parsed configuration; the create/upload operations hang off it
// as methods.
type processor struct {
	cfg *config // parsed command line / INI configuration
}
|
||||
|
||||
// httpClient initializes the http.Client according to the "Insecure" flag
|
||||
// and the TLS client files for authentication and returns it.
|
||||
func (p *processor) httpClient() *http.Client {
|
||||
var client http.Client
|
||||
var tlsConfig tls.Config
|
||||
|
||||
if p.cfg.Insecure {
|
||||
tlsConfig.InsecureSkipVerify = true
|
||||
}
|
||||
|
||||
if len(p.cfg.clientCerts) != 0 {
|
||||
tlsConfig.Certificates = p.cfg.clientCerts
|
||||
}
|
||||
|
||||
client.Transport = &http.Transport{
|
||||
TLSClientConfig: &tlsConfig,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
return &client
|
||||
}
|
||||
|
||||
// writeStrings prints header followed by one tab-indented line per
// message. Nothing is printed when the message list is empty.
func writeStrings(header string, messages []string) {
	if len(messages) == 0 {
		return
	}
	fmt.Println(header)
	for _, message := range messages {
		fmt.Printf("\t%s\n", message)
	}
}
|
||||
|
||||
// create sends an request to create the initial files and directories
|
||||
// on the server. It prints the response messages.
|
||||
func (p *processor) create() error {
|
||||
req, err := http.NewRequest(http.MethodGet, p.cfg.URL+"/api/create", nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Set("X-CSAF-PROVIDER-AUTH", p.cfg.cachedAuth)
|
||||
|
||||
resp, err := p.httpClient().Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var createError error
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
createError = fmt.Errorf("create failed: %s", resp.Status)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Message string `json:"message"`
|
||||
Errors []string `json:"errors"`
|
||||
}
|
||||
|
||||
if err := misc.StrictJSONParse(resp.Body, &result); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if result.Message != "" {
|
||||
fmt.Printf("\t%s\n", result.Message)
|
||||
}
|
||||
|
||||
writeStrings("Errors:", result.Errors)
|
||||
|
||||
return createError
|
||||
}
|
||||
|
||||
// uploadRequest creates the request for uploading a csaf document by passing the filename.
// According to the flags values the multipart sections of the request are established.
// It returns the created http request.
func (p *processor) uploadRequest(filename string) (*http.Request, error) {
	data, err := os.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	// Optional local pre-flight: schema-validate the document and check
	// that its tracking ID matches the file name before uploading.
	if !p.cfg.NoSchemaCheck {
		var doc any
		if err := misc.StrictJSONParse(bytes.NewReader(data), &doc); err != nil {
			return nil, err
		}
		errs, err := csaf.ValidateCSAF(doc)
		if err != nil {
			return nil, err
		}
		if len(errs) > 0 {
			writeStrings("Errors:", errs)
			return nil, errors.New("local schema check failed")
		}

		eval := util.NewPathEval()
		if err := util.IDMatchesFilename(eval, doc, filepath.Base(filename)); err != nil {
			return nil, err
		}
	}

	body := new(bytes.Buffer)
	writer := multipart.NewWriter(body)

	// As the csaf_provider only accepts uploads with mime type
	// "application/json" we have to set this.
	part, err := misc.CreateFormFile(
		writer, "csaf", filepath.Base(filename), "application/json")
	if err != nil {
		return nil, err
	}

	if _, err := part.Write(data); err != nil {
		return nil, err
	}

	if err := writer.WriteField("tlp", p.cfg.TLP); err != nil {
		return nil, err
	}

	// No local key ring: pass the passphrase so the server can sign.
	if p.cfg.keyRing == nil && p.cfg.Passphrase != nil {
		if err := writer.WriteField("passphrase", *p.cfg.Passphrase); err != nil {
			return nil, err
		}
	}

	// Local key ring: create a detached, ASCII-armored signature here.
	if p.cfg.keyRing != nil {
		sig, err := p.cfg.keyRing.SignDetached(crypto.NewPlainMessage(data))
		if err != nil {
			return nil, err
		}
		armored, err := armor.ArmorWithTypeAndCustomHeaders(
			sig.Data, constants.PGPSignatureHeader, "", "")
		if err != nil {
			return nil, err
		}
		if err := writer.WriteField("signature", armored); err != nil {
			return nil, err
		}
	}

	// Externally pre-signed: ship the companion ".asc" file's content.
	if p.cfg.ExternalSigned {
		signature, err := os.ReadFile(filename + ".asc")
		if err != nil {
			return nil, err
		}
		if err := writer.WriteField("signature", string(signature)); err != nil {
			return nil, err
		}
	}

	// Close finalizes the multipart body; must happen before building
	// the request so the content length is correct.
	if err := writer.Close(); err != nil {
		return nil, err
	}

	req, err := http.NewRequest(http.MethodPost, p.cfg.URL+"/api/upload", body)
	if err != nil {
		return nil, err
	}

	req.Header.Set("X-CSAF-PROVIDER-AUTH", p.cfg.cachedAuth)
	req.Header.Set("Content-Type", writer.FormDataContentType())

	return req, nil
}
|
||||
|
||||
// process attempts to upload a file to the server.
// It prints the response messages.
func (p *processor) process(filename string) error {

	// Reject file names that violate the CSAF naming conventions.
	if bn := filepath.Base(filename); !util.ConformingFileName(bn) {
		return fmt.Errorf("%q is not a conforming file name", bn)
	}

	req, err := p.uploadRequest(filename)
	if err != nil {
		return err
	}

	resp, err := p.httpClient().Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Remember the status error but still evaluate the response body
	// so server-side warnings/errors are shown to the user.
	var uploadErr error
	if resp.StatusCode != http.StatusOK {
		uploadErr = fmt.Errorf("upload failed: %s", resp.Status)
		fmt.Printf("HTTPS %s\n", uploadErr)
	}

	// We expect a JSON answer so all other is not valid.
	if !strings.Contains(resp.Header.Get("Content-Type"), "application/json") {
		var sb strings.Builder
		if _, err := io.Copy(&sb, resp.Body); err != nil {
			return fmt.Errorf("reading non-JSON reply from server failed: %v", err)
		}
		return fmt.Errorf("non-JSON reply from server: %v", sb.String())
	}

	var result struct {
		Name        string   `json:"name"`
		ReleaseDate string   `json:"release_date"`
		Warnings    []string `json:"warnings"`
		Errors      []string `json:"errors"`
	}

	if err := misc.StrictJSONParse(resp.Body, &result); err != nil {
		return err
	}

	if result.Name != "" {
		fmt.Printf("Name: %s\n", result.Name)
	}
	if result.ReleaseDate != "" {
		fmt.Printf("Release date: %s\n", result.ReleaseDate)
	}

	writeStrings("Warnings:", result.Warnings)
	writeStrings("Errors:", result.Errors)

	return uploadErr
}
|
||||
|
||||
// run executes the configured action: it performs the "create" call
// first if requested and then uploads every given CSAF file.
func (p *processor) run(args []string) error {

	// NOTE(review): a successful "create" does not return early here;
	// any file arguments are still processed afterwards — confirm this
	// is intended.
	if p.cfg.Action == "create" {
		if err := p.create(); err != nil {
			return err
		}
	}

	if len(args) == 0 {
		log.Println("No CSAF files given.")
	}

	for _, arg := range args {
		if err := p.process(arg); err != nil {
			return fmt.Errorf("processing %q failed: %v", arg, err)
		}
	}

	return nil
}
|
||||
308
cmd/csaf_validator/main.go
Normal file
308
cmd/csaf_validator/main.go
Normal file
|
|
@ -0,0 +1,308 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
// Package main implements the csaf_validator tool.
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/jessevdk/go-flags"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/csaf"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// Exit codes of the validator. The failure codes are bit flags that
// are OR-ed together, so a single run can report several conditions.
const (
	exitCodeSchemaInvalid = 2 << iota // a document failed the JSON schema check
	exitCodeNoRemoteValidator         // no remote validator was configured
	exitCodeFailedRemoteValidation    // the remote validator rejected a document
	exitCodeAllValid = 0              // everything passed
)
|
||||
|
||||
// options collects the command line flags of the csaf_validator.
type options struct {
	Version                bool     `long:"version" description:"Display version of the binary"`
	RemoteValidator        string   `long:"validator" description:"URL to validate documents remotely" value-name:"URL"`
	RemoteValidatorCache   string   `long:"validator_cache" description:"FILE to cache remote validations" value-name:"FILE"`
	RemoteValidatorPresets []string `long:"validator_preset" description:"One or more presets to validate remotely" default:"mandatory"`
	Output                 string   `short:"o" long:"output" description:"If a remote validator was used, display AMOUNT ('all', 'important' or 'short') results" value-name:"AMOUNT"`
}
|
||||
|
||||
func main() {
|
||||
opts := new(options)
|
||||
|
||||
parser := flags.NewParser(opts, flags.Default)
|
||||
parser.Usage = "[OPTIONS] files..."
|
||||
files, err := parser.Parse()
|
||||
errCheck(err)
|
||||
|
||||
if opts.Version {
|
||||
fmt.Println(util.SemVersion)
|
||||
return
|
||||
}
|
||||
|
||||
if len(files) == 0 {
|
||||
log.Println("No files given.")
|
||||
return
|
||||
}
|
||||
|
||||
errCheck(run(opts, files))
|
||||
}
|
||||
|
||||
// run validates the given files.
|
||||
func run(opts *options, files []string) error {
|
||||
exitCode := exitCodeAllValid
|
||||
|
||||
var validator csaf.RemoteValidator
|
||||
eval := util.NewPathEval()
|
||||
|
||||
if opts.RemoteValidator != "" {
|
||||
validatorOptions := csaf.RemoteValidatorOptions{
|
||||
URL: opts.RemoteValidator,
|
||||
Presets: opts.RemoteValidatorPresets,
|
||||
Cache: opts.RemoteValidatorCache,
|
||||
}
|
||||
var err error
|
||||
if validator, err = validatorOptions.Open(); err != nil {
|
||||
return fmt.Errorf(
|
||||
"preparing remote validator failed: %w", err)
|
||||
}
|
||||
defer validator.Close()
|
||||
} else {
|
||||
exitCode |= exitCodeNoRemoteValidator
|
||||
log.Printf("warn: no remote validator specified")
|
||||
}
|
||||
|
||||
// Select amount level of output for remote validation.
|
||||
var printResult func(*csaf.RemoteValidationResult)
|
||||
switch opts.Output {
|
||||
case "all":
|
||||
printResult = printAll
|
||||
case "short":
|
||||
printResult = printShort
|
||||
case "important":
|
||||
printResult = printImportant
|
||||
case "":
|
||||
printResult = noPrint
|
||||
default:
|
||||
return fmt.Errorf("unknown output amount %q", opts.Output)
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
// Check if the file name is valid.
|
||||
if !util.ConformingFileName(filepath.Base(file)) {
|
||||
fmt.Printf("%q is not a valid advisory name.\n", file)
|
||||
}
|
||||
doc, err := loadJSONFromFile(file)
|
||||
if err != nil {
|
||||
log.Printf("error: loading %q as JSON failed: %v\n", file, err)
|
||||
continue
|
||||
}
|
||||
// Validate against Schema.
|
||||
validationErrs, err := csaf.ValidateCSAF(doc)
|
||||
if err != nil {
|
||||
log.Printf("error: validating %q against schema failed: %v\n",
|
||||
file, err)
|
||||
|
||||
}
|
||||
if len(validationErrs) > 0 {
|
||||
exitCode |= exitCodeSchemaInvalid
|
||||
fmt.Printf("schema validation errors of %q\n", file)
|
||||
for _, vErr := range validationErrs {
|
||||
fmt.Printf(" * %s\n", vErr)
|
||||
}
|
||||
} else {
|
||||
fmt.Printf("%q passes the schema validation.\n", file)
|
||||
}
|
||||
|
||||
// Check filename against ID
|
||||
if err := util.IDMatchesFilename(eval, doc, filepath.Base(file)); err != nil {
|
||||
log.Printf("%s: %s.\n", file, err)
|
||||
}
|
||||
|
||||
// Validate against remote validator.
|
||||
if validator != nil {
|
||||
rvr, err := validator.Validate(doc)
|
||||
if err != nil {
|
||||
return fmt.Errorf("remote validation of %q failed: %w",
|
||||
file, err)
|
||||
}
|
||||
printResult(rvr)
|
||||
var passes string
|
||||
if rvr.Valid {
|
||||
passes = "passes"
|
||||
} else {
|
||||
exitCode |= exitCodeFailedRemoteValidation
|
||||
passes = "does not pass"
|
||||
}
|
||||
fmt.Printf("%q %s remote validation.\n", file, passes)
|
||||
}
|
||||
}
|
||||
|
||||
// Exit code is based on validation results
|
||||
os.Exit(exitCode)
|
||||
return nil
|
||||
}
|
||||
|
||||
// noPrint suppresses the output of the validation result. It is used
// when no --output amount was requested.
func noPrint(*csaf.RemoteValidationResult) {}
|
||||
|
||||
// messageInstancePaths aggregates errors, warnings and infos by their
// message.
type messageInstancePaths struct {
	message string   // the shared message text
	paths   []string // instance paths where the message occurred
}
|
||||
|
||||
// messageInstancePathsList is a list for errors, warnings or infos,
// deduplicated by (message, instance path).
type messageInstancePathsList []messageInstancePaths
|
||||
|
||||
// addAll adds all errors, warnings or infos of a test.
|
||||
func (mipl *messageInstancePathsList) addAll(rtrs []csaf.RemoteTestResult) {
|
||||
for _, rtr := range rtrs {
|
||||
mipl.add(rtr)
|
||||
}
|
||||
}
|
||||
|
||||
// add adds a test result unless it is a duplicate.
|
||||
func (mipl *messageInstancePathsList) add(rtr csaf.RemoteTestResult) {
|
||||
for i := range *mipl {
|
||||
m := &(*mipl)[i]
|
||||
// Already have this message?
|
||||
if m.message == rtr.Message {
|
||||
for _, path := range m.paths {
|
||||
// Avoid dupes.
|
||||
if path == rtr.InstancePath {
|
||||
return
|
||||
}
|
||||
}
|
||||
m.paths = append(m.paths, rtr.InstancePath)
|
||||
return
|
||||
}
|
||||
}
|
||||
*mipl = append(*mipl, messageInstancePaths{
|
||||
message: rtr.Message,
|
||||
paths: []string{rtr.InstancePath},
|
||||
})
|
||||
}
|
||||
|
||||
// print prints the details of the list to stdout if there are any.
|
||||
func (mipl messageInstancePathsList) print(info string) {
|
||||
if len(mipl) == 0 {
|
||||
return
|
||||
}
|
||||
fmt.Println(info)
|
||||
for i := range mipl {
|
||||
mip := &mipl[i]
|
||||
fmt.Printf(" message: %s\n", mip.message)
|
||||
fmt.Println(" instance path(s):")
|
||||
for _, path := range mip.paths {
|
||||
fmt.Printf(" %s\n", path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// printShort outputs the validation result in an aggregated version.
|
||||
func printShort(rvr *csaf.RemoteValidationResult) {
|
||||
|
||||
var errors, warnings, infos messageInstancePathsList
|
||||
|
||||
for i := range rvr.Tests {
|
||||
test := &rvr.Tests[i]
|
||||
errors.addAll(test.Error)
|
||||
warnings.addAll(test.Warning)
|
||||
infos.addAll(test.Info)
|
||||
}
|
||||
|
||||
fmt.Printf("isValid: %t\n", rvr.Valid)
|
||||
errors.print("errors:")
|
||||
warnings.print("warnings:")
|
||||
infos.print("infos:")
|
||||
}
|
||||
|
||||
// printImportant displays only the test results which are really relevant.
|
||||
func printImportant(rvr *csaf.RemoteValidationResult) {
|
||||
printRemoteValidationResult(rvr, func(rt *csaf.RemoteTest) bool {
|
||||
return !rt.Valid ||
|
||||
len(rt.Info) > 0 || len(rt.Error) > 0 || len(rt.Warning) > 0
|
||||
})
|
||||
}
|
||||
|
||||
// printAll displays all test results.
|
||||
func printAll(rvr *csaf.RemoteValidationResult) {
|
||||
printRemoteValidationResult(rvr, func(*csaf.RemoteTest) bool {
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// printInstanceAndMessages prints the message and the instance path of
|
||||
// a test result.
|
||||
func printInstanceAndMessages(info string, me []csaf.RemoteTestResult) {
|
||||
if len(me) == 0 {
|
||||
return
|
||||
}
|
||||
fmt.Printf(" %s\n", info)
|
||||
for _, test := range me {
|
||||
fmt.Printf(" instance path: %s\n", test.InstancePath)
|
||||
fmt.Printf(" message: %s\n", test.Message)
|
||||
}
|
||||
}
|
||||
|
||||
// printRemoteValidationResult prints a filtered output of the remote validation result.
|
||||
func printRemoteValidationResult(
|
||||
rvr *csaf.RemoteValidationResult,
|
||||
accept func(*csaf.RemoteTest) bool,
|
||||
) {
|
||||
|
||||
fmt.Printf("isValid: %t\n", rvr.Valid)
|
||||
fmt.Println("tests:")
|
||||
nl := false
|
||||
for i := range rvr.Tests {
|
||||
test := &rvr.Tests[i]
|
||||
if !accept(test) {
|
||||
continue
|
||||
}
|
||||
if nl {
|
||||
fmt.Println()
|
||||
} else {
|
||||
nl = true
|
||||
}
|
||||
fmt.Printf(" name: %s\n", test.Name)
|
||||
fmt.Printf(" isValid: %t\n", test.Valid)
|
||||
printInstanceAndMessages("errors:", test.Error)
|
||||
printInstanceAndMessages("warnings:", test.Warning)
|
||||
printInstanceAndMessages("infos:", test.Info)
|
||||
}
|
||||
}
|
||||
|
||||
func errCheck(err error) {
|
||||
if err != nil {
|
||||
if flags.WroteHelp(err) {
|
||||
os.Exit(0)
|
||||
}
|
||||
log.Fatalf("error: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
// loadJSONFromFile loads a JSON document from a file.
|
||||
func loadJSONFromFile(fname string) (any, error) {
|
||||
f, err := os.Open(fname)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
var doc any
|
||||
if err = misc.StrictJSONParse(f, &doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return doc, err
|
||||
}
|
||||
391
csaf/advisories.go
Normal file
391
csaf/advisories.go
Normal file
|
|
@ -0,0 +1,391 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package csaf
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// AdvisoryFile constructs the urls of a remote file.
type AdvisoryFile interface {
	slog.LogValuer
	// URL returns the location of the advisory document itself.
	URL() string
	// SHA256URL returns the location of the SHA256 checksum file.
	SHA256URL() string
	// SHA512URL returns the location of the SHA512 checksum file.
	SHA512URL() string
	// SignURL returns the location of the OpenPGP signature file.
	SignURL() string
	// IsDirectory reports whether the file was found via directory feeds.
	IsDirectory() bool
}
|
||||
|
||||
// PlainAdvisoryFile contains all relevant urls of a remote file.
type PlainAdvisoryFile struct {
	// Path is the URL of the advisory document.
	Path string
	// SHA256 is the URL of the SHA256 checksum file.
	SHA256 string
	// SHA512 is the URL of the SHA512 checksum file.
	SHA512 string
	// Sign is the URL of the OpenPGP signature file.
	Sign string
}

// URL returns the URL of this advisory.
func (paf PlainAdvisoryFile) URL() string { return paf.Path }

// SHA256URL returns the URL of SHA256 hash file of this advisory.
func (paf PlainAdvisoryFile) SHA256URL() string { return paf.SHA256 }

// SHA512URL returns the URL of SHA512 hash file of this advisory.
func (paf PlainAdvisoryFile) SHA512URL() string { return paf.SHA512 }

// SignURL returns the URL of signature file of this advisory.
func (paf PlainAdvisoryFile) SignURL() string { return paf.Sign }

// IsDirectory returns true, if was fetched via directory feeds.
func (paf PlainAdvisoryFile) IsDirectory() bool { return false }

// LogValue implements [slog.LogValuer]
func (paf PlainAdvisoryFile) LogValue() slog.Value {
	return slog.GroupValue(slog.String("url", paf.URL()))
}
|
||||
|
||||
// DirectoryAdvisoryFile only contains the base file path.
// The hash and signature files are directly constructed by extending
// the file name.
type DirectoryAdvisoryFile struct {
	// Path is the URL of the advisory document.
	Path string
}

// URL returns the URL of this advisory.
func (daf DirectoryAdvisoryFile) URL() string { return daf.Path }

// SHA256URL returns the URL of SHA256 hash file of this advisory.
func (daf DirectoryAdvisoryFile) SHA256URL() string { return daf.Path + ".sha256" }

// SHA512URL returns the URL of SHA512 hash file of this advisory.
func (daf DirectoryAdvisoryFile) SHA512URL() string { return daf.Path + ".sha512" }

// SignURL returns the URL of signature file of this advisory.
func (daf DirectoryAdvisoryFile) SignURL() string { return daf.Path + ".asc" }

// IsDirectory returns true, if was fetched via directory feeds.
func (daf DirectoryAdvisoryFile) IsDirectory() bool { return true }

// LogValue implements [slog.LogValuer]
func (daf DirectoryAdvisoryFile) LogValue() slog.Value {
	return slog.GroupValue(slog.String("url", daf.URL()))
}
|
||||
|
||||
// AdvisoryFileProcessor implements the extraction of
// advisory file names from a given provider metadata.
type AdvisoryFileProcessor struct {
	// AgeAccept optionally filters advisories by their timestamp;
	// nil means no age filtering.
	AgeAccept func(time.Time) bool
	// Log optionally receives diagnostics; a default slog-based
	// logger is used when nil.
	Log    func(loglevel slog.Level, format string, args ...any)
	client util.Client    // HTTP client used for fetching feeds and files
	expr   *util.PathEval // JSONPath evaluator over the metadata document
	doc    any            // the provider metadata document
	pmdURL *url.URL       // location the metadata was loaded from
}
|
||||
|
||||
// NewAdvisoryFileProcessor constructs a filename extractor
|
||||
// for a given metadata document.
|
||||
func NewAdvisoryFileProcessor(
|
||||
client util.Client,
|
||||
expr *util.PathEval,
|
||||
doc any,
|
||||
pmdURL *url.URL,
|
||||
) *AdvisoryFileProcessor {
|
||||
return &AdvisoryFileProcessor{
|
||||
client: client,
|
||||
expr: expr,
|
||||
doc: doc,
|
||||
pmdURL: pmdURL,
|
||||
}
|
||||
}
|
||||
|
||||
// empty reports whether arr contains no non-empty string, i.e. it is
// true for nil, an empty slice, or a slice of only empty strings.
func empty(arr []string) bool {
	for i := range arr {
		if arr[i] != "" {
			return false
		}
	}
	return true
}
|
||||
|
||||
// Process extracts the advisory filenames and passes them with
// the corresponding label to fn.
// ROLIE feeds take precedence; only when none are listed in the
// metadata are directory URLs (or the PMD base URL) scanned via
// changes.csv.
func (afp *AdvisoryFileProcessor) Process(
	fn func(TLPLabel, []AdvisoryFile) error,
) error {
	lg := afp.Log
	if lg == nil {
		// Default logger: forward to slog with a fixed message prefix.
		lg = func(loglevel slog.Level, format string, args ...any) {
			slog.Log(context.Background(), loglevel, "AdvisoryFileProcessor.Process: "+format, args...)
		}
	}

	// Check if we have ROLIE feeds.
	rolie, err := afp.expr.Eval(
		"$.distributions[*].rolie.feeds", afp.doc)
	if err != nil {
		lg(slog.LevelError, "rolie check failed", "err", err)
		return err
	}

	fs, hasRolie := rolie.([]any)
	hasRolie = hasRolie && len(fs) > 0

	if hasRolie {
		var feeds [][]Feed
		if err := util.ReMarshalJSON(&feeds, rolie); err != nil {
			return err
		}
		lg(slog.LevelInfo, "Found ROLIE feed(s)", "length", len(feeds))

		for _, feed := range feeds {
			if err := afp.processROLIE(feed, fn); err != nil {
				return err
			}
		}
	} else {
		// No rolie feeds -> try to load files from index.txt

		directoryURLs, err := afp.expr.Eval(
			"$.distributions[*].directory_url", afp.doc)

		var dirURLs []string

		// Extraction failures are logged but not fatal; the PMD base
		// URL fallback below still applies.
		if err != nil {
			lg(slog.LevelError, "extracting directory URLs failed", "err", err)
		} else {
			var ok bool
			dirURLs, ok = util.AsStrings(directoryURLs)
			if !ok {
				lg(slog.LevelError, "directory_urls are not strings")
			}
		}

		// Not found -> fall back to PMD url
		if empty(dirURLs) {
			baseURL, err := util.BaseURL(afp.pmdURL)
			if err != nil {
				return err
			}
			dirURLs = []string{baseURL}
		}

		for _, base := range dirURLs {
			if base == "" {
				continue
			}

			// Use changes.csv to be able to filter by age.
			files, err := afp.loadChanges(base, lg)
			if err != nil {
				return err
			}
			// XXX: Is treating as white okay? better look into the advisories?
			if err := fn(TLPLabelWhite, files); err != nil {
				return err
			}
		}
	} // TODO: else scan directories?
	return nil
}
|
||||
|
||||
// loadChanges loads baseURL/changes.csv and returns a list of files
|
||||
// prefixed by baseURL/.
|
||||
func (afp *AdvisoryFileProcessor) loadChanges(
|
||||
baseURL string,
|
||||
lg func(slog.Level, string, ...any),
|
||||
) ([]AdvisoryFile, error) {
|
||||
base, err := url.Parse(baseURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
changesURL := base.JoinPath("changes.csv").String()
|
||||
|
||||
resp, err := afp.client.Get(changesURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("fetching %s failed. Status code %d (%s)",
|
||||
changesURL, resp.StatusCode, resp.Status)
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
var files []AdvisoryFile
|
||||
c := csv.NewReader(resp.Body)
|
||||
const (
|
||||
pathColumn = 0
|
||||
timeColumn = 1
|
||||
)
|
||||
for line := 1; ; line++ {
|
||||
r, err := c.Read()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(r) < 2 {
|
||||
lg(slog.LevelError, "Not enough columns", "line", line)
|
||||
continue
|
||||
}
|
||||
t, err := time.Parse(time.RFC3339, r[timeColumn])
|
||||
if err != nil {
|
||||
lg(slog.LevelError, "Invalid time stamp in line", "url", changesURL, "line", line, "err", err)
|
||||
continue
|
||||
}
|
||||
// Apply date range filtering.
|
||||
if afp.AgeAccept != nil && !afp.AgeAccept(t) {
|
||||
continue
|
||||
}
|
||||
path := r[pathColumn]
|
||||
if _, err := url.Parse(path); err != nil {
|
||||
lg(slog.LevelError, "Contains an invalid URL", "url", changesURL, "path", path, "line", line)
|
||||
continue
|
||||
}
|
||||
|
||||
pathURL, err := url.Parse(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files = append(files,
|
||||
DirectoryAdvisoryFile{Path: misc.JoinURL(base, pathURL).String()})
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
// processROLIE fetches every feed of labeledFeeds, collects the
// advisory files (with hash and signature URLs) listed in it and
// hands them to fn together with the feed's TLP label. Broken feeds
// or entries are logged and skipped; only fn errors abort the walk.
func (afp *AdvisoryFileProcessor) processROLIE(
	labeledFeeds []Feed,
	fn func(TLPLabel, []AdvisoryFile) error,
) error {
	for i := range labeledFeeds {
		feed := &labeledFeeds[i]
		if feed.URL == nil {
			continue
		}
		feedURL, err := url.Parse(string(*feed.URL))
		if err != nil {
			slog.Error("Invalid URL in feed", "feed", *feed.URL, "err", err)
			continue
		}
		slog.Info("Got feed URL", "feed", feedURL)

		fb, err := util.BaseURL(feedURL)
		if err != nil {
			slog.Error("Invalid feed base URL", "url", fb, "err", err)
			continue
		}

		res, err := afp.client.Get(feedURL.String())
		if err != nil {
			slog.Error("Cannot get feed", "err", err)
			continue
		}
		if res.StatusCode != http.StatusOK {
			slog.Error("Fetching failed",
				"url", feedURL, "status_code", res.StatusCode, "status", res.Status)
			continue
		}
		// Decode inside a closure so the body is closed right after.
		rfeed, err := func() (*ROLIEFeed, error) {
			defer res.Body.Close()
			return LoadROLIEFeed(res.Body)
		}()
		if err != nil {
			slog.Error("Loading ROLIE feed failed", "err", err)
			continue
		}

		var files []AdvisoryFile

		// resolve validates u; it returns "" for empty or unparsable URLs.
		resolve := func(u string) string {
			if u == "" {
				return ""
			}
			p, err := url.Parse(u)
			if err != nil {
				slog.Error("Invalid URL", "url", u, "err", err)
				return ""
			}
			return p.String()
		}

		rfeed.Entries(func(entry *Entry) {
			// Filter if we have date checking.
			if afp.AgeAccept != nil {
				if t := time.Time(entry.Updated); !t.IsZero() && !afp.AgeAccept(t) {
					return
				}
			}

			// Collect the entry's links by their "rel" attribute.
			var self, sha256, sha512, sign string

			for i := range entry.Link {
				link := &entry.Link[i]
				lower := strings.ToLower(link.HRef)
				switch link.Rel {
				case "self":
					self = resolve(link.HRef)
				case "signature":
					sign = resolve(link.HRef)
				case "hash":
					switch {
					case strings.HasSuffix(lower, ".sha256"):
						sha256 = resolve(link.HRef)
					case strings.HasSuffix(lower, ".sha512"):
						sha512 = resolve(link.HRef)
					}
				}
			}

			// Without a document URL the entry is useless.
			if self == "" {
				return
			}

			var file AdvisoryFile

			// Entries without any hash or without a signature are dropped.
			switch {
			case sha256 == "" && sha512 == "":
				slog.Error("No hash listed on ROLIE feed", "file", self)
				return
			case sign == "":
				slog.Error("No signature listed on ROLIE feed", "file", self)
				return
			default:
				file = PlainAdvisoryFile{self, sha256, sha512, sign}
			}

			files = append(files, file)
		})

		var label TLPLabel
		if feed.TLPLabel != nil {
			label = *feed.TLPLabel
		} else {
			label = "unknown"
		}

		if err := fn(label, files); err != nil {
			return err
		}
	}
	return nil
}
|
||||
1652
csaf/advisory.go
Normal file
1652
csaf/advisory.go
Normal file
File diff suppressed because it is too large
Load diff
46
csaf/advisory_test.go
Normal file
46
csaf/advisory_test.go
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
package csaf
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLoadAdvisory(t *testing.T) {
|
||||
type args struct {
|
||||
jsonDir string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "Valid documents",
|
||||
args: args{jsonDir: "csaf-documents/valid"},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Garbage trailing data",
|
||||
args: args{jsonDir: "csaf-documents/trailing-garbage-data"},
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if err := filepath.Walk("../testdata/"+tt.args.jsonDir, func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if info.Mode().IsRegular() && filepath.Ext(info.Name()) == ".json" {
|
||||
if _, err := LoadAdvisory(path); (err != nil) != tt.wantErr {
|
||||
t.Errorf("LoadAdvisory() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
309
csaf/cvss20enums.go
Normal file
309
csaf/cvss20enums.go
Normal file
|
|
@ -0,0 +1,309 @@
|
|||
// SPDX-License-Identifier: BSD-3-Clause
|
||||
// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC.
|
||||
//
|
||||
// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE!
|
||||
|
||||
package csaf
|
||||
|
||||
// CVSS20AccessComplexity represents the accessComplexityType in CVSS20.
type CVSS20AccessComplexity string

const (
	// CVSS20AccessComplexityHigh is a constant for "HIGH".
	CVSS20AccessComplexityHigh CVSS20AccessComplexity = "HIGH"
	// CVSS20AccessComplexityMedium is a constant for "MEDIUM".
	CVSS20AccessComplexityMedium CVSS20AccessComplexity = "MEDIUM"
	// CVSS20AccessComplexityLow is a constant for "LOW".
	CVSS20AccessComplexityLow CVSS20AccessComplexity = "LOW"
)

// cvss20AccessComplexityPattern is an unmarshal helper built from the valid values above.
var cvss20AccessComplexityPattern = alternativesUnmarshal(
	string(CVSS20AccessComplexityHigh),
	string(CVSS20AccessComplexityMedium),
	string(CVSS20AccessComplexityLow),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20AccessComplexity) UnmarshalText(data []byte) error {
	s, err := cvss20AccessComplexityPattern(data)
	if err == nil {
		*e = CVSS20AccessComplexity(s)
	}
	return err
}
|
||||
|
||||
// CVSS20AccessVector represents the accessVectorType in CVSS20.
type CVSS20AccessVector string

const (
	// CVSS20AccessVectorNetwork is a constant for "NETWORK".
	CVSS20AccessVectorNetwork CVSS20AccessVector = "NETWORK"
	// CVSS20AccessVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK".
	CVSS20AccessVectorAdjacentNetwork CVSS20AccessVector = "ADJACENT_NETWORK"
	// CVSS20AccessVectorLocal is a constant for "LOCAL".
	CVSS20AccessVectorLocal CVSS20AccessVector = "LOCAL"
)

// cvss20AccessVectorPattern is an unmarshal helper built from the valid values above.
var cvss20AccessVectorPattern = alternativesUnmarshal(
	string(CVSS20AccessVectorNetwork),
	string(CVSS20AccessVectorAdjacentNetwork),
	string(CVSS20AccessVectorLocal),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20AccessVector) UnmarshalText(data []byte) error {
	s, err := cvss20AccessVectorPattern(data)
	if err == nil {
		*e = CVSS20AccessVector(s)
	}
	return err
}
|
||||
|
||||
// CVSS20Authentication represents the authenticationType in CVSS20.
type CVSS20Authentication string

const (
	// CVSS20AuthenticationMultiple is a constant for "MULTIPLE".
	CVSS20AuthenticationMultiple CVSS20Authentication = "MULTIPLE"
	// CVSS20AuthenticationSingle is a constant for "SINGLE".
	CVSS20AuthenticationSingle CVSS20Authentication = "SINGLE"
	// CVSS20AuthenticationNone is a constant for "NONE".
	CVSS20AuthenticationNone CVSS20Authentication = "NONE"
)

// cvss20AuthenticationPattern is an unmarshal helper built from the valid values above.
var cvss20AuthenticationPattern = alternativesUnmarshal(
	string(CVSS20AuthenticationMultiple),
	string(CVSS20AuthenticationSingle),
	string(CVSS20AuthenticationNone),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20Authentication) UnmarshalText(data []byte) error {
	s, err := cvss20AuthenticationPattern(data)
	if err == nil {
		*e = CVSS20Authentication(s)
	}
	return err
}
|
||||
|
||||
// CVSS20CiaRequirement represents the ciaRequirementType in CVSS20.
type CVSS20CiaRequirement string

const (
	// CVSS20CiaRequirementLow is a constant for "LOW".
	CVSS20CiaRequirementLow CVSS20CiaRequirement = "LOW"
	// CVSS20CiaRequirementMedium is a constant for "MEDIUM".
	CVSS20CiaRequirementMedium CVSS20CiaRequirement = "MEDIUM"
	// CVSS20CiaRequirementHigh is a constant for "HIGH".
	CVSS20CiaRequirementHigh CVSS20CiaRequirement = "HIGH"
	// CVSS20CiaRequirementNotDefined is a constant for "NOT_DEFINED".
	CVSS20CiaRequirementNotDefined CVSS20CiaRequirement = "NOT_DEFINED"
)

// cvss20CiaRequirementPattern is an unmarshal helper built from the valid values above.
var cvss20CiaRequirementPattern = alternativesUnmarshal(
	string(CVSS20CiaRequirementLow),
	string(CVSS20CiaRequirementMedium),
	string(CVSS20CiaRequirementHigh),
	string(CVSS20CiaRequirementNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20CiaRequirement) UnmarshalText(data []byte) error {
	s, err := cvss20CiaRequirementPattern(data)
	if err == nil {
		*e = CVSS20CiaRequirement(s)
	}
	return err
}
|
||||
|
||||
// CVSS20Cia represents the ciaType in CVSS20.
type CVSS20Cia string

const (
	// CVSS20CiaNone is a constant for "NONE".
	CVSS20CiaNone CVSS20Cia = "NONE"
	// CVSS20CiaPartial is a constant for "PARTIAL".
	CVSS20CiaPartial CVSS20Cia = "PARTIAL"
	// CVSS20CiaComplete is a constant for "COMPLETE".
	CVSS20CiaComplete CVSS20Cia = "COMPLETE"
)

// cvss20CiaPattern is an unmarshal helper built from the valid values above.
var cvss20CiaPattern = alternativesUnmarshal(
	string(CVSS20CiaNone),
	string(CVSS20CiaPartial),
	string(CVSS20CiaComplete),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20Cia) UnmarshalText(data []byte) error {
	s, err := cvss20CiaPattern(data)
	if err == nil {
		*e = CVSS20Cia(s)
	}
	return err
}
|
||||
|
||||
// CVSS20CollateralDamagePotential represents the collateralDamagePotentialType in CVSS20.
type CVSS20CollateralDamagePotential string

const (
	// CVSS20CollateralDamagePotentialNone is a constant for "NONE".
	CVSS20CollateralDamagePotentialNone CVSS20CollateralDamagePotential = "NONE"
	// CVSS20CollateralDamagePotentialLow is a constant for "LOW".
	CVSS20CollateralDamagePotentialLow CVSS20CollateralDamagePotential = "LOW"
	// CVSS20CollateralDamagePotentialLowMedium is a constant for "LOW_MEDIUM".
	CVSS20CollateralDamagePotentialLowMedium CVSS20CollateralDamagePotential = "LOW_MEDIUM"
	// CVSS20CollateralDamagePotentialMediumHigh is a constant for "MEDIUM_HIGH".
	CVSS20CollateralDamagePotentialMediumHigh CVSS20CollateralDamagePotential = "MEDIUM_HIGH"
	// CVSS20CollateralDamagePotentialHigh is a constant for "HIGH".
	CVSS20CollateralDamagePotentialHigh CVSS20CollateralDamagePotential = "HIGH"
	// CVSS20CollateralDamagePotentialNotDefined is a constant for "NOT_DEFINED".
	CVSS20CollateralDamagePotentialNotDefined CVSS20CollateralDamagePotential = "NOT_DEFINED"
)

// cvss20CollateralDamagePotentialPattern is an unmarshal helper built from the valid values above.
var cvss20CollateralDamagePotentialPattern = alternativesUnmarshal(
	string(CVSS20CollateralDamagePotentialNone),
	string(CVSS20CollateralDamagePotentialLow),
	string(CVSS20CollateralDamagePotentialLowMedium),
	string(CVSS20CollateralDamagePotentialMediumHigh),
	string(CVSS20CollateralDamagePotentialHigh),
	string(CVSS20CollateralDamagePotentialNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20CollateralDamagePotential) UnmarshalText(data []byte) error {
	s, err := cvss20CollateralDamagePotentialPattern(data)
	if err == nil {
		*e = CVSS20CollateralDamagePotential(s)
	}
	return err
}
|
||||
|
||||
// CVSS20Exploitability represents the exploitabilityType in CVSS20.
type CVSS20Exploitability string

const (
	// CVSS20ExploitabilityUnproven is a constant for "UNPROVEN".
	CVSS20ExploitabilityUnproven CVSS20Exploitability = "UNPROVEN"
	// CVSS20ExploitabilityProofOfConcept is a constant for "PROOF_OF_CONCEPT".
	CVSS20ExploitabilityProofOfConcept CVSS20Exploitability = "PROOF_OF_CONCEPT"
	// CVSS20ExploitabilityFunctional is a constant for "FUNCTIONAL".
	CVSS20ExploitabilityFunctional CVSS20Exploitability = "FUNCTIONAL"
	// CVSS20ExploitabilityHigh is a constant for "HIGH".
	CVSS20ExploitabilityHigh CVSS20Exploitability = "HIGH"
	// CVSS20ExploitabilityNotDefined is a constant for "NOT_DEFINED".
	CVSS20ExploitabilityNotDefined CVSS20Exploitability = "NOT_DEFINED"
)

// cvss20ExploitabilityPattern is an unmarshal helper built from the valid values above.
var cvss20ExploitabilityPattern = alternativesUnmarshal(
	string(CVSS20ExploitabilityUnproven),
	string(CVSS20ExploitabilityProofOfConcept),
	string(CVSS20ExploitabilityFunctional),
	string(CVSS20ExploitabilityHigh),
	string(CVSS20ExploitabilityNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20Exploitability) UnmarshalText(data []byte) error {
	s, err := cvss20ExploitabilityPattern(data)
	if err == nil {
		*e = CVSS20Exploitability(s)
	}
	return err
}
|
||||
|
||||
// CVSS20RemediationLevel represents the remediationLevelType in CVSS20.
type CVSS20RemediationLevel string

const (
	// CVSS20RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX".
	CVSS20RemediationLevelOfficialFix CVSS20RemediationLevel = "OFFICIAL_FIX"
	// CVSS20RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX".
	CVSS20RemediationLevelTemporaryFix CVSS20RemediationLevel = "TEMPORARY_FIX"
	// CVSS20RemediationLevelWorkaround is a constant for "WORKAROUND".
	CVSS20RemediationLevelWorkaround CVSS20RemediationLevel = "WORKAROUND"
	// CVSS20RemediationLevelUnavailable is a constant for "UNAVAILABLE".
	CVSS20RemediationLevelUnavailable CVSS20RemediationLevel = "UNAVAILABLE"
	// CVSS20RemediationLevelNotDefined is a constant for "NOT_DEFINED".
	CVSS20RemediationLevelNotDefined CVSS20RemediationLevel = "NOT_DEFINED"
)

// cvss20RemediationLevelPattern is an unmarshal helper built from the valid values above.
var cvss20RemediationLevelPattern = alternativesUnmarshal(
	string(CVSS20RemediationLevelOfficialFix),
	string(CVSS20RemediationLevelTemporaryFix),
	string(CVSS20RemediationLevelWorkaround),
	string(CVSS20RemediationLevelUnavailable),
	string(CVSS20RemediationLevelNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20RemediationLevel) UnmarshalText(data []byte) error {
	s, err := cvss20RemediationLevelPattern(data)
	if err == nil {
		*e = CVSS20RemediationLevel(s)
	}
	return err
}
|
||||
|
||||
// CVSS20ReportConfidence represents the reportConfidenceType in CVSS20.
type CVSS20ReportConfidence string

const (
	// CVSS20ReportConfidenceUnconfirmed is a constant for "UNCONFIRMED".
	CVSS20ReportConfidenceUnconfirmed CVSS20ReportConfidence = "UNCONFIRMED"
	// CVSS20ReportConfidenceUncorroborated is a constant for "UNCORROBORATED".
	CVSS20ReportConfidenceUncorroborated CVSS20ReportConfidence = "UNCORROBORATED"
	// CVSS20ReportConfidenceConfirmed is a constant for "CONFIRMED".
	CVSS20ReportConfidenceConfirmed CVSS20ReportConfidence = "CONFIRMED"
	// CVSS20ReportConfidenceNotDefined is a constant for "NOT_DEFINED".
	CVSS20ReportConfidenceNotDefined CVSS20ReportConfidence = "NOT_DEFINED"
)

// cvss20ReportConfidencePattern is an unmarshal helper built from the valid values above.
var cvss20ReportConfidencePattern = alternativesUnmarshal(
	string(CVSS20ReportConfidenceUnconfirmed),
	string(CVSS20ReportConfidenceUncorroborated),
	string(CVSS20ReportConfidenceConfirmed),
	string(CVSS20ReportConfidenceNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20ReportConfidence) UnmarshalText(data []byte) error {
	s, err := cvss20ReportConfidencePattern(data)
	if err == nil {
		*e = CVSS20ReportConfidence(s)
	}
	return err
}
|
||||
|
||||
// CVSS20TargetDistribution represents the targetDistributionType in CVSS20.
type CVSS20TargetDistribution string

const (
	// CVSS20TargetDistributionNone is a constant for "NONE".
	CVSS20TargetDistributionNone CVSS20TargetDistribution = "NONE"
	// CVSS20TargetDistributionLow is a constant for "LOW".
	CVSS20TargetDistributionLow CVSS20TargetDistribution = "LOW"
	// CVSS20TargetDistributionMedium is a constant for "MEDIUM".
	CVSS20TargetDistributionMedium CVSS20TargetDistribution = "MEDIUM"
	// CVSS20TargetDistributionHigh is a constant for "HIGH".
	CVSS20TargetDistributionHigh CVSS20TargetDistribution = "HIGH"
	// CVSS20TargetDistributionNotDefined is a constant for "NOT_DEFINED".
	CVSS20TargetDistributionNotDefined CVSS20TargetDistribution = "NOT_DEFINED"
)

// cvss20TargetDistributionPattern is an unmarshal helper built from the valid values above.
var cvss20TargetDistributionPattern = alternativesUnmarshal(
	string(CVSS20TargetDistributionNone),
	string(CVSS20TargetDistributionLow),
	string(CVSS20TargetDistributionMedium),
	string(CVSS20TargetDistributionHigh),
	string(CVSS20TargetDistributionNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS20TargetDistribution) UnmarshalText(data []byte) error {
	s, err := cvss20TargetDistributionPattern(data)
	if err == nil {
		*e = CVSS20TargetDistribution(s)
	}
	return err
}
|
||||
495
csaf/cvss3enums.go
Normal file
495
csaf/cvss3enums.go
Normal file
|
|
@ -0,0 +1,495 @@
|
|||
// SPDX-License-Identifier: BSD-3-Clause
|
||||
// SPDX-FileCopyrightText: 2017 FIRST.ORG, INC.
|
||||
//
|
||||
// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE!
|
||||
|
||||
package csaf
|
||||
|
||||
// CVSS3AttackComplexity represents the attackComplexityType in CVSS3.
type CVSS3AttackComplexity string

const (
	// CVSS3AttackComplexityHigh is a constant for "HIGH".
	CVSS3AttackComplexityHigh CVSS3AttackComplexity = "HIGH"
	// CVSS3AttackComplexityLow is a constant for "LOW".
	CVSS3AttackComplexityLow CVSS3AttackComplexity = "LOW"
)

// cvss3AttackComplexityPattern is an unmarshal helper built from the valid values above.
var cvss3AttackComplexityPattern = alternativesUnmarshal(
	string(CVSS3AttackComplexityHigh),
	string(CVSS3AttackComplexityLow),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3AttackComplexity) UnmarshalText(data []byte) error {
	s, err := cvss3AttackComplexityPattern(data)
	if err == nil {
		*e = CVSS3AttackComplexity(s)
	}
	return err
}
|
||||
|
||||
// CVSS3AttackVector represents the attackVectorType in CVSS3.
type CVSS3AttackVector string

const (
	// CVSS3AttackVectorNetwork is a constant for "NETWORK".
	CVSS3AttackVectorNetwork CVSS3AttackVector = "NETWORK"
	// CVSS3AttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK".
	CVSS3AttackVectorAdjacentNetwork CVSS3AttackVector = "ADJACENT_NETWORK"
	// CVSS3AttackVectorLocal is a constant for "LOCAL".
	CVSS3AttackVectorLocal CVSS3AttackVector = "LOCAL"
	// CVSS3AttackVectorPhysical is a constant for "PHYSICAL".
	CVSS3AttackVectorPhysical CVSS3AttackVector = "PHYSICAL"
)

// cvss3AttackVectorPattern is an unmarshal helper built from the valid values above.
var cvss3AttackVectorPattern = alternativesUnmarshal(
	string(CVSS3AttackVectorNetwork),
	string(CVSS3AttackVectorAdjacentNetwork),
	string(CVSS3AttackVectorLocal),
	string(CVSS3AttackVectorPhysical),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3AttackVector) UnmarshalText(data []byte) error {
	s, err := cvss3AttackVectorPattern(data)
	if err == nil {
		*e = CVSS3AttackVector(s)
	}
	return err
}
|
||||
|
||||
// CVSS3CiaRequirement represents the ciaRequirementType in CVSS3.
type CVSS3CiaRequirement string

const (
	// CVSS3CiaRequirementLow is a constant for "LOW".
	CVSS3CiaRequirementLow CVSS3CiaRequirement = "LOW"
	// CVSS3CiaRequirementMedium is a constant for "MEDIUM".
	CVSS3CiaRequirementMedium CVSS3CiaRequirement = "MEDIUM"
	// CVSS3CiaRequirementHigh is a constant for "HIGH".
	CVSS3CiaRequirementHigh CVSS3CiaRequirement = "HIGH"
	// CVSS3CiaRequirementNotDefined is a constant for "NOT_DEFINED".
	CVSS3CiaRequirementNotDefined CVSS3CiaRequirement = "NOT_DEFINED"
)

// cvss3CiaRequirementPattern is an unmarshal helper built from the valid values above.
var cvss3CiaRequirementPattern = alternativesUnmarshal(
	string(CVSS3CiaRequirementLow),
	string(CVSS3CiaRequirementMedium),
	string(CVSS3CiaRequirementHigh),
	string(CVSS3CiaRequirementNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3CiaRequirement) UnmarshalText(data []byte) error {
	s, err := cvss3CiaRequirementPattern(data)
	if err == nil {
		*e = CVSS3CiaRequirement(s)
	}
	return err
}
|
||||
|
||||
// CVSS3Cia represents the ciaType in CVSS3.
type CVSS3Cia string

const (
	// CVSS3CiaNone is a constant for "NONE".
	CVSS3CiaNone CVSS3Cia = "NONE"
	// CVSS3CiaLow is a constant for "LOW".
	CVSS3CiaLow CVSS3Cia = "LOW"
	// CVSS3CiaHigh is a constant for "HIGH".
	CVSS3CiaHigh CVSS3Cia = "HIGH"
)

// cvss3CiaPattern is an unmarshal helper built from the valid values above.
var cvss3CiaPattern = alternativesUnmarshal(
	string(CVSS3CiaNone),
	string(CVSS3CiaLow),
	string(CVSS3CiaHigh),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3Cia) UnmarshalText(data []byte) error {
	s, err := cvss3CiaPattern(data)
	if err == nil {
		*e = CVSS3Cia(s)
	}
	return err
}
|
||||
|
||||
// CVSS3Confidence represents the confidenceType in CVSS3.
type CVSS3Confidence string

const (
	// CVSS3ConfidenceUnknown is a constant for "UNKNOWN".
	CVSS3ConfidenceUnknown CVSS3Confidence = "UNKNOWN"
	// CVSS3ConfidenceReasonable is a constant for "REASONABLE".
	CVSS3ConfidenceReasonable CVSS3Confidence = "REASONABLE"
	// CVSS3ConfidenceConfirmed is a constant for "CONFIRMED".
	CVSS3ConfidenceConfirmed CVSS3Confidence = "CONFIRMED"
	// CVSS3ConfidenceNotDefined is a constant for "NOT_DEFINED".
	CVSS3ConfidenceNotDefined CVSS3Confidence = "NOT_DEFINED"
)

// cvss3ConfidencePattern is an unmarshal helper built from the valid values above.
var cvss3ConfidencePattern = alternativesUnmarshal(
	string(CVSS3ConfidenceUnknown),
	string(CVSS3ConfidenceReasonable),
	string(CVSS3ConfidenceConfirmed),
	string(CVSS3ConfidenceNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3Confidence) UnmarshalText(data []byte) error {
	s, err := cvss3ConfidencePattern(data)
	if err == nil {
		*e = CVSS3Confidence(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ExploitCodeMaturity represents the exploitCodeMaturityType in CVSS3.
type CVSS3ExploitCodeMaturity string

const (
	// CVSS3ExploitCodeMaturityUnproven is a constant for "UNPROVEN".
	CVSS3ExploitCodeMaturityUnproven CVSS3ExploitCodeMaturity = "UNPROVEN"
	// CVSS3ExploitCodeMaturityProofOfConcept is a constant for "PROOF_OF_CONCEPT".
	CVSS3ExploitCodeMaturityProofOfConcept CVSS3ExploitCodeMaturity = "PROOF_OF_CONCEPT"
	// CVSS3ExploitCodeMaturityFunctional is a constant for "FUNCTIONAL".
	CVSS3ExploitCodeMaturityFunctional CVSS3ExploitCodeMaturity = "FUNCTIONAL"
	// CVSS3ExploitCodeMaturityHigh is a constant for "HIGH".
	CVSS3ExploitCodeMaturityHigh CVSS3ExploitCodeMaturity = "HIGH"
	// CVSS3ExploitCodeMaturityNotDefined is a constant for "NOT_DEFINED".
	CVSS3ExploitCodeMaturityNotDefined CVSS3ExploitCodeMaturity = "NOT_DEFINED"
)

// cvss3ExploitCodeMaturityPattern is an unmarshal helper built from the valid values above.
var cvss3ExploitCodeMaturityPattern = alternativesUnmarshal(
	string(CVSS3ExploitCodeMaturityUnproven),
	string(CVSS3ExploitCodeMaturityProofOfConcept),
	string(CVSS3ExploitCodeMaturityFunctional),
	string(CVSS3ExploitCodeMaturityHigh),
	string(CVSS3ExploitCodeMaturityNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ExploitCodeMaturity) UnmarshalText(data []byte) error {
	s, err := cvss3ExploitCodeMaturityPattern(data)
	if err == nil {
		*e = CVSS3ExploitCodeMaturity(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ModifiedAttackComplexity represents the modifiedAttackComplexityType in CVSS3.
type CVSS3ModifiedAttackComplexity string

const (
	// CVSS3ModifiedAttackComplexityHigh is a constant for "HIGH".
	CVSS3ModifiedAttackComplexityHigh CVSS3ModifiedAttackComplexity = "HIGH"
	// CVSS3ModifiedAttackComplexityLow is a constant for "LOW".
	CVSS3ModifiedAttackComplexityLow CVSS3ModifiedAttackComplexity = "LOW"
	// CVSS3ModifiedAttackComplexityNotDefined is a constant for "NOT_DEFINED".
	CVSS3ModifiedAttackComplexityNotDefined CVSS3ModifiedAttackComplexity = "NOT_DEFINED"
)

// cvss3ModifiedAttackComplexityPattern is an unmarshal helper built from the valid values above.
var cvss3ModifiedAttackComplexityPattern = alternativesUnmarshal(
	string(CVSS3ModifiedAttackComplexityHigh),
	string(CVSS3ModifiedAttackComplexityLow),
	string(CVSS3ModifiedAttackComplexityNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ModifiedAttackComplexity) UnmarshalText(data []byte) error {
	s, err := cvss3ModifiedAttackComplexityPattern(data)
	if err == nil {
		*e = CVSS3ModifiedAttackComplexity(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ModifiedAttackVector represents the modifiedAttackVectorType in CVSS3.
type CVSS3ModifiedAttackVector string

const (
	// CVSS3ModifiedAttackVectorNetwork is a constant for "NETWORK".
	CVSS3ModifiedAttackVectorNetwork CVSS3ModifiedAttackVector = "NETWORK"
	// CVSS3ModifiedAttackVectorAdjacentNetwork is a constant for "ADJACENT_NETWORK".
	CVSS3ModifiedAttackVectorAdjacentNetwork CVSS3ModifiedAttackVector = "ADJACENT_NETWORK"
	// CVSS3ModifiedAttackVectorLocal is a constant for "LOCAL".
	CVSS3ModifiedAttackVectorLocal CVSS3ModifiedAttackVector = "LOCAL"
	// CVSS3ModifiedAttackVectorPhysical is a constant for "PHYSICAL".
	CVSS3ModifiedAttackVectorPhysical CVSS3ModifiedAttackVector = "PHYSICAL"
	// CVSS3ModifiedAttackVectorNotDefined is a constant for "NOT_DEFINED".
	CVSS3ModifiedAttackVectorNotDefined CVSS3ModifiedAttackVector = "NOT_DEFINED"
)

// cvss3ModifiedAttackVectorPattern is an unmarshal helper built from the valid values above.
var cvss3ModifiedAttackVectorPattern = alternativesUnmarshal(
	string(CVSS3ModifiedAttackVectorNetwork),
	string(CVSS3ModifiedAttackVectorAdjacentNetwork),
	string(CVSS3ModifiedAttackVectorLocal),
	string(CVSS3ModifiedAttackVectorPhysical),
	string(CVSS3ModifiedAttackVectorNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ModifiedAttackVector) UnmarshalText(data []byte) error {
	s, err := cvss3ModifiedAttackVectorPattern(data)
	if err == nil {
		*e = CVSS3ModifiedAttackVector(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ModifiedCia represents the modifiedCiaType in CVSS3.
type CVSS3ModifiedCia string

const (
	// CVSS3ModifiedCiaNone is a constant for "NONE".
	CVSS3ModifiedCiaNone CVSS3ModifiedCia = "NONE"
	// CVSS3ModifiedCiaLow is a constant for "LOW".
	CVSS3ModifiedCiaLow CVSS3ModifiedCia = "LOW"
	// CVSS3ModifiedCiaHigh is a constant for "HIGH".
	CVSS3ModifiedCiaHigh CVSS3ModifiedCia = "HIGH"
	// CVSS3ModifiedCiaNotDefined is a constant for "NOT_DEFINED".
	CVSS3ModifiedCiaNotDefined CVSS3ModifiedCia = "NOT_DEFINED"
)

// cvss3ModifiedCiaPattern is an unmarshal helper built from the valid values above.
var cvss3ModifiedCiaPattern = alternativesUnmarshal(
	string(CVSS3ModifiedCiaNone),
	string(CVSS3ModifiedCiaLow),
	string(CVSS3ModifiedCiaHigh),
	string(CVSS3ModifiedCiaNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ModifiedCia) UnmarshalText(data []byte) error {
	s, err := cvss3ModifiedCiaPattern(data)
	if err == nil {
		*e = CVSS3ModifiedCia(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ModifiedPrivilegesRequired represents the modifiedPrivilegesRequiredType in CVSS3.
type CVSS3ModifiedPrivilegesRequired string

const (
	// CVSS3ModifiedPrivilegesRequiredHigh is a constant for "HIGH".
	CVSS3ModifiedPrivilegesRequiredHigh CVSS3ModifiedPrivilegesRequired = "HIGH"
	// CVSS3ModifiedPrivilegesRequiredLow is a constant for "LOW".
	CVSS3ModifiedPrivilegesRequiredLow CVSS3ModifiedPrivilegesRequired = "LOW"
	// CVSS3ModifiedPrivilegesRequiredNone is a constant for "NONE".
	CVSS3ModifiedPrivilegesRequiredNone CVSS3ModifiedPrivilegesRequired = "NONE"
	// CVSS3ModifiedPrivilegesRequiredNotDefined is a constant for "NOT_DEFINED".
	CVSS3ModifiedPrivilegesRequiredNotDefined CVSS3ModifiedPrivilegesRequired = "NOT_DEFINED"
)

// cvss3ModifiedPrivilegesRequiredPattern is an unmarshal helper built from the valid values above.
var cvss3ModifiedPrivilegesRequiredPattern = alternativesUnmarshal(
	string(CVSS3ModifiedPrivilegesRequiredHigh),
	string(CVSS3ModifiedPrivilegesRequiredLow),
	string(CVSS3ModifiedPrivilegesRequiredNone),
	string(CVSS3ModifiedPrivilegesRequiredNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ModifiedPrivilegesRequired) UnmarshalText(data []byte) error {
	s, err := cvss3ModifiedPrivilegesRequiredPattern(data)
	if err == nil {
		*e = CVSS3ModifiedPrivilegesRequired(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ModifiedScope represents the modifiedScopeType in CVSS3.
type CVSS3ModifiedScope string

const (
	// CVSS3ModifiedScopeUnchanged is a constant for "UNCHANGED".
	CVSS3ModifiedScopeUnchanged CVSS3ModifiedScope = "UNCHANGED"
	// CVSS3ModifiedScopeChanged is a constant for "CHANGED".
	CVSS3ModifiedScopeChanged CVSS3ModifiedScope = "CHANGED"
	// CVSS3ModifiedScopeNotDefined is a constant for "NOT_DEFINED".
	CVSS3ModifiedScopeNotDefined CVSS3ModifiedScope = "NOT_DEFINED"
)

// cvss3ModifiedScopePattern is an unmarshal helper built from the valid values above.
var cvss3ModifiedScopePattern = alternativesUnmarshal(
	string(CVSS3ModifiedScopeUnchanged),
	string(CVSS3ModifiedScopeChanged),
	string(CVSS3ModifiedScopeNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ModifiedScope) UnmarshalText(data []byte) error {
	s, err := cvss3ModifiedScopePattern(data)
	if err == nil {
		*e = CVSS3ModifiedScope(s)
	}
	return err
}
|
||||
|
||||
// CVSS3ModifiedUserInteraction represents the modifiedUserInteractionType in CVSS3.
type CVSS3ModifiedUserInteraction string

const (
	// CVSS3ModifiedUserInteractionNone is a constant for "NONE".
	CVSS3ModifiedUserInteractionNone CVSS3ModifiedUserInteraction = "NONE"
	// CVSS3ModifiedUserInteractionRequired is a constant for "REQUIRED".
	CVSS3ModifiedUserInteractionRequired CVSS3ModifiedUserInteraction = "REQUIRED"
	// CVSS3ModifiedUserInteractionNotDefined is a constant for "NOT_DEFINED".
	CVSS3ModifiedUserInteractionNotDefined CVSS3ModifiedUserInteraction = "NOT_DEFINED"
)

// cvss3ModifiedUserInteractionPattern is an unmarshal helper built from the valid values above.
var cvss3ModifiedUserInteractionPattern = alternativesUnmarshal(
	string(CVSS3ModifiedUserInteractionNone),
	string(CVSS3ModifiedUserInteractionRequired),
	string(CVSS3ModifiedUserInteractionNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3ModifiedUserInteraction) UnmarshalText(data []byte) error {
	s, err := cvss3ModifiedUserInteractionPattern(data)
	if err == nil {
		*e = CVSS3ModifiedUserInteraction(s)
	}
	return err
}
|
||||
|
||||
// CVSS3PrivilegesRequired represents the privilegesRequiredType in CVSS3.
type CVSS3PrivilegesRequired string

const (
	// CVSS3PrivilegesRequiredHigh is a constant for "HIGH".
	CVSS3PrivilegesRequiredHigh CVSS3PrivilegesRequired = "HIGH"
	// CVSS3PrivilegesRequiredLow is a constant for "LOW".
	CVSS3PrivilegesRequiredLow CVSS3PrivilegesRequired = "LOW"
	// CVSS3PrivilegesRequiredNone is a constant for "NONE".
	CVSS3PrivilegesRequiredNone CVSS3PrivilegesRequired = "NONE"
)

// cvss3PrivilegesRequiredPattern is an unmarshal helper built from the valid values above.
var cvss3PrivilegesRequiredPattern = alternativesUnmarshal(
	string(CVSS3PrivilegesRequiredHigh),
	string(CVSS3PrivilegesRequiredLow),
	string(CVSS3PrivilegesRequiredNone),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3PrivilegesRequired) UnmarshalText(data []byte) error {
	s, err := cvss3PrivilegesRequiredPattern(data)
	if err == nil {
		*e = CVSS3PrivilegesRequired(s)
	}
	return err
}
|
||||
|
||||
// CVSS3RemediationLevel represents the remediationLevelType in CVSS3.
type CVSS3RemediationLevel string

const (
	// CVSS3RemediationLevelOfficialFix is a constant for "OFFICIAL_FIX".
	CVSS3RemediationLevelOfficialFix CVSS3RemediationLevel = "OFFICIAL_FIX"
	// CVSS3RemediationLevelTemporaryFix is a constant for "TEMPORARY_FIX".
	CVSS3RemediationLevelTemporaryFix CVSS3RemediationLevel = "TEMPORARY_FIX"
	// CVSS3RemediationLevelWorkaround is a constant for "WORKAROUND".
	CVSS3RemediationLevelWorkaround CVSS3RemediationLevel = "WORKAROUND"
	// CVSS3RemediationLevelUnavailable is a constant for "UNAVAILABLE".
	CVSS3RemediationLevelUnavailable CVSS3RemediationLevel = "UNAVAILABLE"
	// CVSS3RemediationLevelNotDefined is a constant for "NOT_DEFINED".
	CVSS3RemediationLevelNotDefined CVSS3RemediationLevel = "NOT_DEFINED"
)

// cvss3RemediationLevelPattern is an unmarshal helper built from the valid values above.
var cvss3RemediationLevelPattern = alternativesUnmarshal(
	string(CVSS3RemediationLevelOfficialFix),
	string(CVSS3RemediationLevelTemporaryFix),
	string(CVSS3RemediationLevelWorkaround),
	string(CVSS3RemediationLevelUnavailable),
	string(CVSS3RemediationLevelNotDefined),
)

// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
// On success the matched value is stored in *e; on error *e is left unchanged.
func (e *CVSS3RemediationLevel) UnmarshalText(data []byte) error {
	s, err := cvss3RemediationLevelPattern(data)
	if err == nil {
		*e = CVSS3RemediationLevel(s)
	}
	return err
}
|
||||
|
||||
// CVSS3Scope represents the scopeType in CVSS3.
|
||||
type CVSS3Scope string
|
||||
|
||||
const (
|
||||
// CVSS3ScopeUnchanged is a constant for "UNCHANGED".
|
||||
CVSS3ScopeUnchanged CVSS3Scope = "UNCHANGED"
|
||||
// CVSS3ScopeChanged is a constant for "CHANGED".
|
||||
CVSS3ScopeChanged CVSS3Scope = "CHANGED"
|
||||
)
|
||||
|
||||
var cvss3ScopePattern = alternativesUnmarshal(
|
||||
string(CVSS3ScopeUnchanged),
|
||||
string(CVSS3ScopeChanged),
|
||||
)
|
||||
|
||||
// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
|
||||
func (e *CVSS3Scope) UnmarshalText(data []byte) error {
|
||||
s, err := cvss3ScopePattern(data)
|
||||
if err == nil {
|
||||
*e = CVSS3Scope(s)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// CVSS3Severity represents the severityType in CVSS3.
|
||||
type CVSS3Severity string
|
||||
|
||||
const (
|
||||
// CVSS3SeverityNone is a constant for "NONE".
|
||||
CVSS3SeverityNone CVSS3Severity = "NONE"
|
||||
// CVSS3SeverityLow is a constant for "LOW".
|
||||
CVSS3SeverityLow CVSS3Severity = "LOW"
|
||||
// CVSS3SeverityMedium is a constant for "MEDIUM".
|
||||
CVSS3SeverityMedium CVSS3Severity = "MEDIUM"
|
||||
// CVSS3SeverityHigh is a constant for "HIGH".
|
||||
CVSS3SeverityHigh CVSS3Severity = "HIGH"
|
||||
// CVSS3SeverityCritical is a constant for "CRITICAL".
|
||||
CVSS3SeverityCritical CVSS3Severity = "CRITICAL"
|
||||
)
|
||||
|
||||
var cvss3SeverityPattern = alternativesUnmarshal(
|
||||
string(CVSS3SeverityNone),
|
||||
string(CVSS3SeverityLow),
|
||||
string(CVSS3SeverityMedium),
|
||||
string(CVSS3SeverityHigh),
|
||||
string(CVSS3SeverityCritical),
|
||||
)
|
||||
|
||||
// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
|
||||
func (e *CVSS3Severity) UnmarshalText(data []byte) error {
|
||||
s, err := cvss3SeverityPattern(data)
|
||||
if err == nil {
|
||||
*e = CVSS3Severity(s)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// CVSS3UserInteraction represents the userInteractionType in CVSS3.
|
||||
type CVSS3UserInteraction string
|
||||
|
||||
const (
|
||||
// CVSS3UserInteractionNone is a constant for "NONE".
|
||||
CVSS3UserInteractionNone CVSS3UserInteraction = "NONE"
|
||||
// CVSS3UserInteractionRequired is a constant for "REQUIRED".
|
||||
CVSS3UserInteractionRequired CVSS3UserInteraction = "REQUIRED"
|
||||
)
|
||||
|
||||
var cvss3UserInteractionPattern = alternativesUnmarshal(
|
||||
string(CVSS3UserInteractionNone),
|
||||
string(CVSS3UserInteractionRequired),
|
||||
)
|
||||
|
||||
// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
|
||||
func (e *CVSS3UserInteraction) UnmarshalText(data []byte) error {
|
||||
s, err := cvss3UserInteractionPattern(data)
|
||||
if err == nil {
|
||||
*e = CVSS3UserInteraction(s)
|
||||
}
|
||||
return err
|
||||
}
|
||||
19
csaf/doc.go
Normal file
19
csaf/doc.go
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
// Package csaf contains the core data models used by the csaf distribution
|
||||
// tools.
|
||||
//
|
||||
// See https://github.com/gocsaf/csaf/tab=readme-ov-file#use-as-go-library
|
||||
// about hints and limits for its use as a library.
|
||||
package csaf
|
||||
|
||||
//go:generate go run ./generate_cvss_enums.go -o cvss20enums.go -i ./schema/cvss-v2.0.json -p CVSS20
|
||||
// Generating only enums for CVSS 3.0 and not for 3.1 since the enums of both of them
|
||||
// are identical.
|
||||
//go:generate go run ./generate_cvss_enums.go -o cvss3enums.go -i ./schema/cvss-v3.0.json -p CVSS3
|
||||
167
csaf/generate_cvss_enums.go
Normal file
167
csaf/generate_cvss_enums.go
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
//go:build ignore
|
||||
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"log"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
)
|
||||
|
||||
// We from Intevation consider the source code parts in the following
|
||||
// template file as too insignificant to be a piece of work that gains
|
||||
// "copyrights" protection in the European Union. So the license(s)
|
||||
// of the output files are fully determined by the input file.
|
||||
const tmplText = `// {{ $.License }}
|
||||
//
|
||||
// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE!
|
||||
|
||||
package csaf
|
||||
|
||||
{{ range $key := .Keys }}
|
||||
{{ $def := index $.Definitions $key }}
|
||||
// {{ $type := printf "%s%s" $.Prefix (typename $key) }}{{ $type }} represents the {{ $key }} in {{ $.Prefix }}.
|
||||
type {{ $type }} string
|
||||
const (
|
||||
{{ range $enum := $def.Enum -}}
|
||||
// {{ $type}}{{ symbol $enum }} is a constant for "{{ $enum }}".
|
||||
{{ $type }}{{ symbol $enum }} {{ $type }} = "{{ $enum }}"
|
||||
{{ end }}
|
||||
)
|
||||
var {{ tolower $.Prefix }}{{ typename $key }}Pattern = alternativesUnmarshal(
|
||||
{{ range $enum := $def.Enum -}}
|
||||
string({{ $type }}{{ symbol $enum }}),
|
||||
{{ end }}
|
||||
)
|
||||
|
||||
// UnmarshalText implements the [encoding.TextUnmarshaler] interface.
|
||||
func (e *{{ $type }}) UnmarshalText(data []byte) error {
|
||||
s, err := {{ tolower $.Prefix }}{{ typename $key }}Pattern(data)
|
||||
if err == nil {
|
||||
*e = {{ $type }}(s)
|
||||
}
|
||||
return err
|
||||
}
|
||||
{{ end }}
|
||||
`
|
||||
|
||||
var tmpl = template.Must(template.New("enums").Funcs(funcs).Parse(tmplText))
|
||||
|
||||
type definition struct {
|
||||
Type string `json:"type"`
|
||||
Enum []string `json:"enum"`
|
||||
}
|
||||
|
||||
type schema struct {
|
||||
License []string `json:"license"`
|
||||
Definitions map[string]*definition `json:"definitions"`
|
||||
}
|
||||
|
||||
var funcs = template.FuncMap{
|
||||
"tolower": strings.ToLower,
|
||||
"symbol": func(s string) string {
|
||||
s = strings.ToLower(s)
|
||||
s = strings.ReplaceAll(s, "_", " ")
|
||||
s = strings.Title(s)
|
||||
s = strings.ReplaceAll(s, " ", "")
|
||||
return s
|
||||
},
|
||||
"typename": func(s string) string {
|
||||
if strings.HasSuffix(s, "Type") {
|
||||
s = s[:len(s)-len("Type")]
|
||||
}
|
||||
s = strings.Title(s)
|
||||
return s
|
||||
},
|
||||
}
|
||||
|
||||
func loadSchema(filename string) (*schema, error) {
|
||||
f, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
var s schema
|
||||
if err := misc.StrictJSONParse(f, &s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &s, nil
|
||||
}
|
||||
|
||||
func check(err error) {
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
input = flag.String("i", "input", "")
|
||||
output = flag.String("o", "output", "")
|
||||
prefix = flag.String("p", "prefix", "")
|
||||
)
|
||||
flag.Parse()
|
||||
if *input == "" {
|
||||
log.Fatalln("missing schema")
|
||||
}
|
||||
if *output == "" {
|
||||
log.Fatalln("missing output")
|
||||
}
|
||||
if *prefix == "" {
|
||||
log.Fatalln("missing prefix")
|
||||
}
|
||||
|
||||
s, err := loadSchema(*input)
|
||||
check(err)
|
||||
|
||||
defs := make([]string, 0, len(s.Definitions))
|
||||
for k, v := range s.Definitions {
|
||||
if v.Type == "string" && len(v.Enum) > 0 {
|
||||
defs = append(defs, k)
|
||||
}
|
||||
}
|
||||
sort.Strings(defs)
|
||||
|
||||
license := "determine license(s) from input file and replace this line"
|
||||
|
||||
pattern := regexp.MustCompile(`Copyright \(c\) (\d+), FIRST.ORG, INC.`)
|
||||
for _, line := range s.License {
|
||||
if m := pattern.FindStringSubmatch(line); m != nil {
|
||||
license = fmt.Sprintf(
|
||||
"SPDX-License-Identifier: BSD-3-Clause\n"+
|
||||
"// SPDX-FileCopyrightText: %s FIRST.ORG, INC.", m[1])
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var source bytes.Buffer
|
||||
|
||||
check(tmpl.Execute(&source, map[string]any{
|
||||
"License": license,
|
||||
"Prefix": *prefix,
|
||||
"Definitions": s.Definitions,
|
||||
"Keys": defs,
|
||||
}))
|
||||
|
||||
formatted, err := format.Source(source.Bytes())
|
||||
check(err)
|
||||
|
||||
check(os.WriteFile(*output, formatted, 0644))
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -17,7 +17,8 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// TLPLabel is the traffic light policy of the CSAF.
|
||||
|
|
@ -37,11 +38,11 @@ const (
|
|||
)
|
||||
|
||||
var tlpLabelPattern = alternativesUnmarshal(
|
||||
string(TLPLabelUnlabeled),
|
||||
string(TLPLabelWhite),
|
||||
string(TLPLabelGreen),
|
||||
string(TLPLabelAmber),
|
||||
string(TLPLabelRed),
|
||||
TLPLabelUnlabeled,
|
||||
TLPLabelWhite,
|
||||
TLPLabelGreen,
|
||||
TLPLabelAmber,
|
||||
TLPLabelRed,
|
||||
)
|
||||
|
||||
// JSONURL is an URL to JSON document.
|
||||
|
|
@ -218,12 +219,20 @@ type AggregatorCSAFProvider struct {
|
|||
Mirrors []ProviderURL `json:"mirrors,omitempty"` // required
|
||||
}
|
||||
|
||||
// AggregatorCSAFPublisher reflects one publisher in an aggregator.
|
||||
type AggregatorCSAFPublisher struct {
|
||||
Metadata *AggregatorCSAFProviderMetadata `json:"metadata,omitempty"` // required
|
||||
Mirrors []ProviderURL `json:"mirrors,omitempty"` // required
|
||||
UpdateInterval string `json:"update_interval,omitempty"` // required
|
||||
}
|
||||
|
||||
// Aggregator is the CSAF Aggregator.
|
||||
type Aggregator struct {
|
||||
Aggregator *AggregatorInfo `json:"aggregator,omitempty"` // required
|
||||
Version *AggregatorVersion `json:"aggregator_version,omitempty"` // required
|
||||
CanonicalURL *AggregatorURL `json:"canonical_url,omitempty"` // required
|
||||
CSAFProviders []*AggregatorCSAFProvider `json:"csaf_providers,omitempty"` // required
|
||||
CSAFPublishers []*AggregatorCSAFPublisher `json:"csaf_publishers,omitempty"`
|
||||
LastUpdated *TimeStamp `json:"last_updated,omitempty"` // required
|
||||
}
|
||||
|
||||
|
|
@ -290,10 +299,7 @@ func (acp *AggregatorCSAFProvider) Validate() error {
|
|||
if acp == nil {
|
||||
return errors.New("aggregator.csaf_providers[] not allowed to be nil")
|
||||
}
|
||||
if err := acp.Metadata.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
return acp.Metadata.Validate()
|
||||
}
|
||||
|
||||
// Validate validates the current state of the Aggregator.
|
||||
|
|
@ -313,11 +319,29 @@ func (a *Aggregator) Validate() error {
|
|||
}
|
||||
}
|
||||
if a.LastUpdated == nil {
|
||||
return errors.New("Aggregator.LastUpdate == nil")
|
||||
return errors.New("aggregator.LastUpdate == nil")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaller interface.
|
||||
func (mdv *MetadataVersion) UnmarshalText(data []byte) error {
|
||||
s, err := metadataVersionPattern(data)
|
||||
if err == nil {
|
||||
*mdv = MetadataVersion(s)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaller interface.
|
||||
func (mdr *MetadataRole) UnmarshalText(data []byte) error {
|
||||
s, err := metadataRolePattern(data)
|
||||
if err == nil {
|
||||
*mdr = MetadataRole(s)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaller interface.
|
||||
func (ac *AggregatorCategory) UnmarshalText(data []byte) error {
|
||||
s, err := aggregatorCategoryPattern(data)
|
||||
|
|
@ -448,6 +472,18 @@ func (pmd *ProviderMetadata) Defaults() {
|
|||
}
|
||||
}
|
||||
|
||||
// AddDirectoryDistribution adds a directory based distribution
|
||||
// with a given url to the provider metadata.
|
||||
func (pmd *ProviderMetadata) AddDirectoryDistribution(url string) {
|
||||
// Avoid duplicates.
|
||||
for i := range pmd.Distributions {
|
||||
if pmd.Distributions[i].DirectoryURL == url {
|
||||
return
|
||||
}
|
||||
}
|
||||
pmd.Distributions = append(pmd.Distributions, Distribution{DirectoryURL: url})
|
||||
}
|
||||
|
||||
// Validate checks if the feed is valid.
|
||||
// Returns an error if the validation fails otherwise nil.
|
||||
func (f *Feed) Validate() error {
|
||||
|
|
@ -540,7 +576,6 @@ func (d *Distribution) Validate() error {
|
|||
// Validate checks if the provider metadata is valid.
|
||||
// Returns an error if the validation fails otherwise nil.
|
||||
func (pmd *ProviderMetadata) Validate() error {
|
||||
|
||||
switch {
|
||||
case pmd.CanonicalURL == nil:
|
||||
return errors.New("canonical_url is mandatory")
|
||||
|
|
@ -581,7 +616,7 @@ func (pmd *ProviderMetadata) SetLastUpdated(t time.Time) {
|
|||
// If there is no such key it is append to the list of keys.
|
||||
func (pmd *ProviderMetadata) SetPGP(fingerprint, url string) {
|
||||
for i := range pmd.PGPKeys {
|
||||
if pmd.PGPKeys[i].Fingerprint == Fingerprint(fingerprint) {
|
||||
if strings.EqualFold(string(pmd.PGPKeys[i].Fingerprint), fingerprint) {
|
||||
pmd.PGPKeys[i].URL = &url
|
||||
return
|
||||
}
|
||||
|
|
@ -660,8 +695,7 @@ func (pmd *ProviderMetadata) WriteTo(w io.Writer) (int64, error) {
|
|||
func LoadProviderMetadata(r io.Reader) (*ProviderMetadata, error) {
|
||||
|
||||
var pmd ProviderMetadata
|
||||
dec := json.NewDecoder(r)
|
||||
if err := dec.Decode(&pmd); err != nil {
|
||||
if err := misc.StrictJSONParse(r, &pmd); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
|
|
|||
369
csaf/providermetaloader.go
Normal file
369
csaf/providermetaloader.go
Normal file
|
|
@ -0,0 +1,369 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2023 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2023 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package csaf
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// ProviderMetadataLoader helps load provider-metadata.json from
|
||||
// the various locations.
|
||||
type ProviderMetadataLoader struct {
|
||||
client util.Client
|
||||
already map[string]*LoadedProviderMetadata
|
||||
messages ProviderMetadataLoadMessages
|
||||
}
|
||||
|
||||
// ProviderMetadataLoadMessageType is the type of the message.
|
||||
type ProviderMetadataLoadMessageType int
|
||||
|
||||
const (
|
||||
// JSONDecodingFailed indicates problems with JSON decoding
|
||||
JSONDecodingFailed ProviderMetadataLoadMessageType = iota
|
||||
// SchemaValidationFailed indicates a general problem with schema validation.
|
||||
SchemaValidationFailed
|
||||
// SchemaValidationFailedDetail is a failure detail in schema validation.
|
||||
SchemaValidationFailedDetail
|
||||
// HTTPFailed indicates that loading on HTTP level failed.
|
||||
HTTPFailed
|
||||
// ExtraProviderMetadataFound indicates an extra PMD found in security.txt.
|
||||
ExtraProviderMetadataFound
|
||||
// WellknownSecurityMismatch indicates that the PMDs found under wellknown and
|
||||
// in the security do not match.
|
||||
WellknownSecurityMismatch
|
||||
// IgnoreProviderMetadata indicates that an extra PMD was ignored.
|
||||
IgnoreProviderMetadata
|
||||
)
|
||||
|
||||
// ProviderMetadataLoadMessage is a message generated while loading
|
||||
// a provider meta data file.
|
||||
type ProviderMetadataLoadMessage struct {
|
||||
Type ProviderMetadataLoadMessageType
|
||||
Message string
|
||||
}
|
||||
|
||||
// ProviderMetadataLoadMessages is a list of loading messages.
|
||||
type ProviderMetadataLoadMessages []ProviderMetadataLoadMessage
|
||||
|
||||
// LoadedProviderMetadata represents a loaded provider metadata.
|
||||
type LoadedProviderMetadata struct {
|
||||
// URL is location where the document was found.
|
||||
URL string
|
||||
// Document is the de-serialized JSON document.
|
||||
Document any
|
||||
// Hash is a SHA256 sum over the document.
|
||||
Hash []byte
|
||||
// Messages are the error message happened while loading.
|
||||
Messages ProviderMetadataLoadMessages
|
||||
}
|
||||
|
||||
// Add appends a message to the list of loading messages.
|
||||
func (pmlm *ProviderMetadataLoadMessages) Add(
|
||||
typ ProviderMetadataLoadMessageType,
|
||||
msg string,
|
||||
) {
|
||||
*pmlm = append(*pmlm, ProviderMetadataLoadMessage{
|
||||
Type: typ,
|
||||
Message: msg,
|
||||
})
|
||||
}
|
||||
|
||||
// AppendUnique appends unique messages from a second list.
|
||||
func (pmlm *ProviderMetadataLoadMessages) AppendUnique(other ProviderMetadataLoadMessages) {
|
||||
next:
|
||||
for _, o := range other {
|
||||
for _, m := range *pmlm {
|
||||
if m == o {
|
||||
continue next
|
||||
}
|
||||
}
|
||||
*pmlm = append(*pmlm, o)
|
||||
}
|
||||
}
|
||||
|
||||
// Valid returns true if the loaded document is valid.
|
||||
func (lpm *LoadedProviderMetadata) Valid() bool {
|
||||
return lpm != nil && lpm.Document != nil && lpm.Hash != nil
|
||||
}
|
||||
|
||||
// NewProviderMetadataLoader create a new loader.
|
||||
func NewProviderMetadataLoader(client util.Client) *ProviderMetadataLoader {
|
||||
return &ProviderMetadataLoader{
|
||||
client: client,
|
||||
already: map[string]*LoadedProviderMetadata{},
|
||||
}
|
||||
}
|
||||
|
||||
// Enumerate lists all PMD files that can be found under the given domain.
|
||||
// As specified in CSAF 2.0, it looks for PMDs using the well-known URL and
|
||||
// the security.txt, and if no PMDs have been found, it also checks the DNS-URL.
|
||||
func (pmdl *ProviderMetadataLoader) Enumerate(domain string) []*LoadedProviderMetadata {
|
||||
|
||||
// Our array of PMDs to be found
|
||||
var resPMDs []*LoadedProviderMetadata
|
||||
|
||||
// Check direct path
|
||||
if strings.HasPrefix(domain, "https://") {
|
||||
return []*LoadedProviderMetadata{pmdl.loadFromURL(domain)}
|
||||
}
|
||||
|
||||
// First try the well-known path.
|
||||
wellknownURL := "https://" + domain + "/.well-known/csaf/provider-metadata.json"
|
||||
|
||||
wellknownResult := pmdl.loadFromURL(wellknownURL)
|
||||
|
||||
// Validate the candidate and add to the result array
|
||||
if wellknownResult.Valid() {
|
||||
slog.Debug("Found well known provider-metadata.json")
|
||||
resPMDs = append(resPMDs, wellknownResult)
|
||||
}
|
||||
|
||||
// Next load the PMDs from security.txt
|
||||
secResults := pmdl.loadFromSecurity(domain)
|
||||
slog.Info("Found provider metadata results in security.txt", "num", len(secResults))
|
||||
|
||||
for _, result := range secResults {
|
||||
if result.Valid() {
|
||||
resPMDs = append(resPMDs, result)
|
||||
}
|
||||
}
|
||||
|
||||
// According to the spec, only if no PMDs have been found, the should DNS URL be used
|
||||
if len(resPMDs) > 0 {
|
||||
return resPMDs
|
||||
}
|
||||
dnsURL := "https://csaf.data.security." + domain
|
||||
return []*LoadedProviderMetadata{pmdl.loadFromURL(dnsURL)}
|
||||
}
|
||||
|
||||
// Load loads one valid provider metadata for a given path.
|
||||
// If the domain starts with `https://` it only attempts to load
|
||||
// the data from that URL.
|
||||
func (pmdl *ProviderMetadataLoader) Load(domain string) *LoadedProviderMetadata {
|
||||
|
||||
// Check direct path
|
||||
if strings.HasPrefix(domain, "https://") {
|
||||
return pmdl.loadFromURL(domain)
|
||||
}
|
||||
|
||||
// First try the well-known path.
|
||||
wellknownURL := "https://" + domain + "/.well-known/csaf/provider-metadata.json"
|
||||
|
||||
wellknownResult := pmdl.loadFromURL(wellknownURL)
|
||||
|
||||
// Valid provider metadata under well-known.
|
||||
var wellknownGood *LoadedProviderMetadata
|
||||
|
||||
// We have a candidate.
|
||||
if wellknownResult.Valid() {
|
||||
wellknownGood = wellknownResult
|
||||
} else {
|
||||
pmdl.messages.AppendUnique(wellknownResult.Messages)
|
||||
}
|
||||
|
||||
// Next load the PMDs from security.txt
|
||||
secGoods := pmdl.loadFromSecurity(domain)
|
||||
|
||||
// Mention extra CSAF entries in security.txt.
|
||||
ignoreExtras := func() {
|
||||
for _, extra := range secGoods[1:] {
|
||||
pmdl.messages.Add(
|
||||
ExtraProviderMetadataFound,
|
||||
fmt.Sprintf("Ignoring extra CSAF entry in security.txt: %s", extra.URL))
|
||||
}
|
||||
}
|
||||
|
||||
// security.txt contains good entries.
|
||||
if len(secGoods) > 0 {
|
||||
// we already have a good wellknown, take it.
|
||||
if wellknownGood != nil {
|
||||
// check if first of security urls is identical to wellknown.
|
||||
if bytes.Equal(wellknownGood.Hash, secGoods[0].Hash) {
|
||||
ignoreExtras()
|
||||
} else {
|
||||
// Complaint about not matching.
|
||||
pmdl.messages.Add(
|
||||
WellknownSecurityMismatch,
|
||||
"First entry of security.txt and well-known don't match.")
|
||||
// List all the security urls.
|
||||
for _, sec := range secGoods {
|
||||
pmdl.messages.Add(
|
||||
IgnoreProviderMetadata,
|
||||
fmt.Sprintf("Ignoring CSAF entry in security.txt: %s", sec.URL))
|
||||
}
|
||||
}
|
||||
// Take the good well-known.
|
||||
wellknownGood.Messages = pmdl.messages
|
||||
return wellknownGood
|
||||
}
|
||||
|
||||
// Don't have well-known. Take first good from security.txt.
|
||||
ignoreExtras()
|
||||
secGoods[0].Messages = pmdl.messages
|
||||
return secGoods[0]
|
||||
}
|
||||
|
||||
// If we have a good well-known take it.
|
||||
if wellknownGood != nil {
|
||||
wellknownGood.Messages = pmdl.messages
|
||||
return wellknownGood
|
||||
}
|
||||
|
||||
// Last resort: fall back to DNS.
|
||||
dnsURL := "https://csaf.data.security." + domain
|
||||
dnsURLResult := pmdl.loadFromURL(dnsURL)
|
||||
pmdl.messages.AppendUnique(dnsURLResult.Messages) // keep order of messages consistent (i.e. last occurred message is last element)
|
||||
dnsURLResult.Messages = pmdl.messages
|
||||
return dnsURLResult
|
||||
}
|
||||
|
||||
// loadFromSecurity loads the PMDs mentioned in the security.txt. Only valid PMDs are returned.
|
||||
func (pmdl *ProviderMetadataLoader) loadFromSecurity(domain string) []*LoadedProviderMetadata {
|
||||
|
||||
// If .well-known fails try legacy location.
|
||||
for _, path := range []string{
|
||||
"https://" + domain + "/.well-known/security.txt",
|
||||
"https://" + domain + "/security.txt",
|
||||
} {
|
||||
res, err := pmdl.client.Get(path)
|
||||
if err != nil {
|
||||
pmdl.messages.Add(
|
||||
HTTPFailed,
|
||||
fmt.Sprintf("Fetching %q failed: %v", path, err))
|
||||
continue
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
pmdl.messages.Add(
|
||||
HTTPFailed,
|
||||
fmt.Sprintf("Fetching %q failed: %s (%d)", path, res.Status, res.StatusCode))
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract all potential URLs from CSAF.
|
||||
urls, err := func() ([]string, error) {
|
||||
defer res.Body.Close()
|
||||
return ExtractProviderURL(res.Body, true)
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
pmdl.messages.Add(
|
||||
HTTPFailed,
|
||||
fmt.Sprintf("Loading %q failed: %v", path, err))
|
||||
continue
|
||||
}
|
||||
|
||||
var loaded []*LoadedProviderMetadata
|
||||
|
||||
// Load the URLs
|
||||
nextURL:
|
||||
for _, url := range urls {
|
||||
lpmd := pmdl.loadFromURL(url)
|
||||
// If loading failed note it down.
|
||||
if !lpmd.Valid() {
|
||||
pmdl.messages.AppendUnique(lpmd.Messages)
|
||||
continue
|
||||
}
|
||||
// Check for duplicates
|
||||
for _, l := range loaded {
|
||||
if l == lpmd {
|
||||
continue nextURL
|
||||
}
|
||||
}
|
||||
loaded = append(loaded, lpmd)
|
||||
}
|
||||
|
||||
return loaded
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadFromURL loads a provider metadata from a given URL.
|
||||
func (pmdl *ProviderMetadataLoader) loadFromURL(path string) *LoadedProviderMetadata {
|
||||
|
||||
result := LoadedProviderMetadata{URL: path}
|
||||
|
||||
res, err := pmdl.client.Get(path)
|
||||
if err != nil {
|
||||
result.Messages.Add(
|
||||
HTTPFailed,
|
||||
fmt.Sprintf("fetching %q failed: %v", path, err))
|
||||
return &result
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
result.Messages.Add(
|
||||
HTTPFailed,
|
||||
fmt.Sprintf("fetching %q failed: %s (%d)", path, res.Status, res.StatusCode))
|
||||
return &result
|
||||
}
|
||||
|
||||
// TODO: Check for application/json and log it.
|
||||
|
||||
defer res.Body.Close()
|
||||
|
||||
// Calculate checksum for later comparison.
|
||||
hash := sha256.New()
|
||||
|
||||
tee := io.TeeReader(res.Body, hash)
|
||||
|
||||
var doc any
|
||||
|
||||
if err := misc.StrictJSONParse(tee, &doc); err != nil {
|
||||
result.Messages.Add(
|
||||
JSONDecodingFailed,
|
||||
fmt.Sprintf("JSON decoding failed: %v", err))
|
||||
return &result
|
||||
}
|
||||
|
||||
// Before checking the err lets check if we had the same
|
||||
// document before. If so it will have failed parsing before.
|
||||
|
||||
sum := hash.Sum(nil)
|
||||
key := string(sum)
|
||||
|
||||
// If we already have loaded it return the cached result.
|
||||
if r := pmdl.already[key]; r != nil {
|
||||
return r
|
||||
}
|
||||
|
||||
// write it back as loaded
|
||||
|
||||
switch errors, err := ValidateProviderMetadata(doc); {
|
||||
case err != nil:
|
||||
result.Messages.Add(
|
||||
SchemaValidationFailed,
|
||||
fmt.Sprintf("%s: Validating against JSON schema failed: %v", path, err))
|
||||
|
||||
case len(errors) > 0:
|
||||
result.Messages = []ProviderMetadataLoadMessage{{
|
||||
Type: SchemaValidationFailed,
|
||||
Message: fmt.Sprintf("%s: Validating against JSON schema failed", path),
|
||||
}}
|
||||
for _, msg := range errors {
|
||||
result.Messages.Add(
|
||||
SchemaValidationFailedDetail,
|
||||
strings.ReplaceAll(msg, `%`, `%%`))
|
||||
}
|
||||
default:
|
||||
// Only store in result if validation passed.
|
||||
result.Document = doc
|
||||
result.Hash = sum
|
||||
}
|
||||
|
||||
pmdl.already[key] = &result
|
||||
return &result
|
||||
}
|
||||
343
csaf/remotevalidation.go
Normal file
343
csaf/remotevalidation.go
Normal file
|
|
@ -0,0 +1,343 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package csaf
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/zlib"
|
||||
"crypto/sha256"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"sync"
|
||||
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
bolt "go.etcd.io/bbolt"
|
||||
)
|
||||
|
||||
// defaultURL is default URL where to look for
|
||||
// the validation service.
|
||||
const (
|
||||
defaultURL = "http://localhost:8082"
|
||||
validationPath = "/api/v1/validate"
|
||||
)
|
||||
|
||||
// defaultPresets are the presets to check.
|
||||
var defaultPresets = []string{"mandatory"}
|
||||
|
||||
var (
|
||||
validationsBucket = []byte("validations")
|
||||
cacheVersionKey = []byte("version")
|
||||
cacheVersion = []byte("1")
|
||||
)
|
||||
|
||||
// RemoteValidatorOptions are the configuation options
|
||||
// of the remote validation service.
|
||||
type RemoteValidatorOptions struct {
|
||||
URL string `json:"url" toml:"url"`
|
||||
Presets []string `json:"presets" toml:"presets"`
|
||||
Cache string `json:"cache" toml:"cache"`
|
||||
}
|
||||
|
||||
type test struct {
|
||||
Type string `json:"type"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
// outDocument is the document send to the remote validation service.
|
||||
type outDocument struct {
|
||||
Tests []test `json:"tests"`
|
||||
Document any `json:"document"`
|
||||
}
|
||||
|
||||
// RemoteTestResult are any given test-result by a remote validator test.
|
||||
type RemoteTestResult struct {
|
||||
Message string `json:"message"`
|
||||
InstancePath string `json:"instancePath"`
|
||||
}
|
||||
|
||||
// RemoteTest is the result of the remote tests
|
||||
// recieved by the remote validation service.
|
||||
type RemoteTest struct {
|
||||
Name string `json:"name"`
|
||||
Valid bool `json:"isValid"`
|
||||
Error []RemoteTestResult `json:"errors"`
|
||||
Warning []RemoteTestResult `json:"warnings"`
|
||||
Info []RemoteTestResult `json:"infos"`
|
||||
}
|
||||
|
||||
// RemoteValidationResult is the document recieved from the remote validation service.
|
||||
type RemoteValidationResult struct {
|
||||
Valid bool `json:"isValid"`
|
||||
Tests []RemoteTest `json:"tests"`
|
||||
}
|
||||
|
||||
type cache interface {
|
||||
get(key []byte) ([]byte, error)
|
||||
set(key []byte, value []byte) error
|
||||
Close() error
|
||||
}
|
||||
|
||||
// RemoteValidator validates an advisory document remotely.
|
||||
type RemoteValidator interface {
|
||||
Validate(doc any) (*RemoteValidationResult, error)
|
||||
Close() error
|
||||
}
|
||||
|
||||
// SynchronizedRemoteValidator returns a serialized variant
|
||||
// of the given remote validator.
|
||||
func SynchronizedRemoteValidator(validator RemoteValidator) RemoteValidator {
|
||||
return &syncedRemoteValidator{RemoteValidator: validator}
|
||||
}
|
||||
|
||||
// remoteValidator is an implementation of an RemoteValidator.
|
||||
type remoteValidator struct {
|
||||
url string
|
||||
tests []test
|
||||
cache cache
|
||||
}
|
||||
|
||||
// syncedRemoteValidator is a serialized variant of a remote validator.
|
||||
type syncedRemoteValidator struct {
|
||||
sync.Mutex
|
||||
RemoteValidator
|
||||
}
|
||||
|
||||
// Validate implements the validation part of the RemoteValidator interface.
|
||||
func (srv *syncedRemoteValidator) Validate(doc any) (*RemoteValidationResult, error) {
|
||||
srv.Lock()
|
||||
defer srv.Unlock()
|
||||
return srv.RemoteValidator.Validate(doc)
|
||||
}
|
||||
|
||||
// Validate implements the closing part of the RemoteValidator interface.
|
||||
func (srv *syncedRemoteValidator) Close() error {
|
||||
srv.Lock()
|
||||
defer srv.Unlock()
|
||||
return srv.RemoteValidator.Close()
|
||||
}
|
||||
|
||||
// prepareTests precompiles the presets for the remote check.
|
||||
func prepareTests(presets []string) []test {
|
||||
if len(presets) == 0 {
|
||||
presets = defaultPresets
|
||||
}
|
||||
tests := make([]test, len(presets))
|
||||
for i := range tests {
|
||||
tests[i] = test{Type: "preset", Name: presets[i]}
|
||||
}
|
||||
return tests
|
||||
}
|
||||
|
||||
// prepareURL prepares the URL to be called for validation.
|
||||
func prepareURL(url string) string {
|
||||
if url == "" {
|
||||
url = defaultURL
|
||||
}
|
||||
return url + validationPath
|
||||
}
|
||||
|
||||
// prepareCache sets up the cache if it is configured.
|
||||
func prepareCache(config string) (cache, error) {
|
||||
if config == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
db, err := bolt.Open(config, 0600, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create the bucket.
|
||||
if err := db.Update(func(tx *bolt.Tx) error {
|
||||
|
||||
// Create a new bucket with version set.
|
||||
create := func() error {
|
||||
b, err := tx.CreateBucket(validationsBucket)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return b.Put(cacheVersionKey, cacheVersion)
|
||||
}
|
||||
|
||||
b := tx.Bucket(validationsBucket)
|
||||
|
||||
if b == nil { // Bucket does not exists -> create.
|
||||
return create()
|
||||
}
|
||||
// Bucket exists.
|
||||
if v := b.Get(cacheVersionKey); !bytes.Equal(v, cacheVersion) {
|
||||
// version mismatch -> delete and re-create.
|
||||
if err := tx.DeleteBucket(validationsBucket); err != nil {
|
||||
return err
|
||||
}
|
||||
return create()
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
db.Close()
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return boltCache{db}, nil
|
||||
}
|
||||
|
||||
// boltCache is cache implementation based on the bolt datastore.
|
||||
type boltCache struct{ *bolt.DB }
|
||||
|
||||
// get implements the fetch part of the cache interface.
|
||||
func (bc boltCache) get(key []byte) ([]byte, error) {
|
||||
var value []byte
|
||||
if err := bc.View(func(tx *bolt.Tx) error {
|
||||
b := tx.Bucket(validationsBucket)
|
||||
value = b.Get(key)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// set implements the store part of the cache interface.
|
||||
func (bc boltCache) set(key, value []byte) error {
|
||||
return bc.Update(func(tx *bolt.Tx) error {
|
||||
b := tx.Bucket(validationsBucket)
|
||||
return b.Put(key, value)
|
||||
})
|
||||
}
|
||||
|
||||
// Open opens a new remoteValidator.
|
||||
func (rvo *RemoteValidatorOptions) Open() (RemoteValidator, error) {
|
||||
cache, err := prepareCache(rvo.Cache)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &remoteValidator{
|
||||
url: prepareURL(rvo.URL),
|
||||
tests: prepareTests(rvo.Presets),
|
||||
cache: cache,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Close closes the remote validator.
|
||||
func (v *remoteValidator) Close() error {
|
||||
if v.cache != nil {
|
||||
return v.cache.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// key calculates the key for an advisory document and presets.
|
||||
func (v *remoteValidator) key(doc any) ([]byte, error) {
|
||||
h := sha256.New()
|
||||
if err := json.NewEncoder(h).Encode(doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for i := range v.tests {
|
||||
if _, err := h.Write([]byte(v.tests[i].Name)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return h.Sum(nil), nil
|
||||
}
|
||||
|
||||
// deserialize revives a remote validation result from a cache value.
|
||||
func deserialize(value []byte) (*RemoteValidationResult, error) {
|
||||
r, err := zlib.NewReader(bytes.NewReader(value))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer r.Close()
|
||||
var rvr RemoteValidationResult
|
||||
if err := misc.StrictJSONParse(r, &rvr); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &rvr, nil
|
||||
}
|
||||
|
||||
// Validate executes a remote validation of an advisory.
|
||||
func (v *remoteValidator) Validate(doc any) (*RemoteValidationResult, error) {
|
||||
|
||||
var key []byte
|
||||
|
||||
// First look into cache.
|
||||
if v.cache != nil {
|
||||
var err error
|
||||
if key, err = v.key(doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
value, err := v.cache.get(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if value != nil {
|
||||
return deserialize(value)
|
||||
}
|
||||
}
|
||||
|
||||
o := outDocument{
|
||||
Document: doc,
|
||||
Tests: v.tests,
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if err := json.NewEncoder(&buf).Encode(&o); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp, err := http.Post(
|
||||
v.url,
|
||||
"application/json",
|
||||
bytes.NewReader(buf.Bytes()))
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf(
|
||||
"POST failed: %s (%d)", resp.Status, resp.StatusCode)
|
||||
}
|
||||
|
||||
var (
|
||||
zout *zlib.Writer
|
||||
rvr RemoteValidationResult
|
||||
)
|
||||
|
||||
if err := func() error {
|
||||
defer resp.Body.Close()
|
||||
var in io.Reader
|
||||
// If we are caching record the incoming data and compress it.
|
||||
if key != nil {
|
||||
buf.Reset() // reuse the out buffer.
|
||||
zout = zlib.NewWriter(&buf)
|
||||
in = io.TeeReader(resp.Body, zout)
|
||||
} else {
|
||||
// no cache -> process directly.
|
||||
in = resp.Body
|
||||
}
|
||||
return misc.StrictJSONParse(in, &rvr)
|
||||
}(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Store in cache
|
||||
if key != nil {
|
||||
if err := zout.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// The document is now compressed in the buffer.
|
||||
if err := v.cache.set(key, buf.Bytes()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return &rvr, nil
|
||||
}
|
||||
167
csaf/rolie.go
167
csaf/rolie.go
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -14,9 +14,130 @@ import (
|
|||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/internal/misc"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
// ROLIEServiceWorkspaceCollectionCategoriesCategory is a category in a ROLIE service collection.
|
||||
type ROLIEServiceWorkspaceCollectionCategoriesCategory struct {
|
||||
Scheme string `json:"scheme"`
|
||||
Term string `json:"term"`
|
||||
}
|
||||
|
||||
// ROLIEServiceWorkspaceCollectionCategories are categories in a ROLIE service collection.
|
||||
type ROLIEServiceWorkspaceCollectionCategories struct {
|
||||
Category []ROLIEServiceWorkspaceCollectionCategoriesCategory `json:"category"`
|
||||
}
|
||||
|
||||
// ROLIEServiceWorkspaceCollection is a collection in a ROLIE service.
|
||||
type ROLIEServiceWorkspaceCollection struct {
|
||||
Title string `json:"title"`
|
||||
HRef string `json:"href"`
|
||||
Categories ROLIEServiceWorkspaceCollectionCategories `json:"categories"`
|
||||
}
|
||||
|
||||
// ROLIEServiceWorkspace is a workspace of a ROLIE service.
|
||||
type ROLIEServiceWorkspace struct {
|
||||
Title string `json:"title"`
|
||||
Collection []ROLIEServiceWorkspaceCollection `json:"collection"`
|
||||
}
|
||||
|
||||
// ROLIEService is a ROLIE service.
|
||||
type ROLIEService struct {
|
||||
Workspace []ROLIEServiceWorkspace `json:"workspace"`
|
||||
}
|
||||
|
||||
// ROLIEServiceDocument is a ROLIE service document.
|
||||
type ROLIEServiceDocument struct {
|
||||
Service ROLIEService `json:"service"`
|
||||
}
|
||||
|
||||
// LoadROLIEServiceDocument loads a ROLIE service document from a reader.
|
||||
func LoadROLIEServiceDocument(r io.Reader) (*ROLIEServiceDocument, error) {
|
||||
var rsd ROLIEServiceDocument
|
||||
if err := misc.StrictJSONParse(r, &rsd); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &rsd, nil
|
||||
}
|
||||
|
||||
// WriteTo saves a ROLIE service document to a writer.
|
||||
func (rsd *ROLIEServiceDocument) WriteTo(w io.Writer) (int64, error) {
|
||||
nw := util.NWriter{Writer: w, N: 0}
|
||||
enc := json.NewEncoder(&nw)
|
||||
enc.SetIndent("", " ")
|
||||
err := enc.Encode(rsd)
|
||||
return nw.N, err
|
||||
}
|
||||
|
||||
// ROLIECategories is a list of ROLIE categories.
|
||||
type ROLIECategories struct {
|
||||
Category []ROLIECategory `json:"category"`
|
||||
}
|
||||
|
||||
// ROLIECategoryDocument is a ROLIE category document.
|
||||
type ROLIECategoryDocument struct {
|
||||
Categories ROLIECategories `json:"categories"`
|
||||
}
|
||||
|
||||
// NewROLIECategoryDocument creates a new ROLIE category document from a list
|
||||
// of categories.
|
||||
func NewROLIECategoryDocument(categories ...string) *ROLIECategoryDocument {
|
||||
rcd := &ROLIECategoryDocument{}
|
||||
rcd.Merge(categories...)
|
||||
return rcd
|
||||
}
|
||||
|
||||
// Merge merges the given categories into the existing ones.
|
||||
// The results indicates if there were changes.
|
||||
func (rcd *ROLIECategoryDocument) Merge(categories ...string) bool {
|
||||
index := util.Set[string]{}
|
||||
for i := range rcd.Categories.Category {
|
||||
index.Add(rcd.Categories.Category[i].Term)
|
||||
}
|
||||
|
||||
oldLen := len(index)
|
||||
|
||||
for _, cat := range categories {
|
||||
if index.Contains(cat) {
|
||||
continue
|
||||
}
|
||||
index.Add(cat)
|
||||
rcd.Categories.Category = append(
|
||||
rcd.Categories.Category, ROLIECategory{Term: cat})
|
||||
}
|
||||
|
||||
if len(index) == oldLen {
|
||||
// No new categories
|
||||
return false
|
||||
}
|
||||
|
||||
// Re-establish order.
|
||||
sort.Slice(rcd.Categories.Category, func(i, j int) bool {
|
||||
return rcd.Categories.Category[i].Term < rcd.Categories.Category[j].Term
|
||||
})
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// LoadROLIECategoryDocument loads a ROLIE category document from a reader.
|
||||
func LoadROLIECategoryDocument(r io.Reader) (*ROLIECategoryDocument, error) {
|
||||
var rcd ROLIECategoryDocument
|
||||
if err := misc.StrictJSONParse(r, &rcd); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &rcd, nil
|
||||
}
|
||||
|
||||
// WriteTo saves a ROLIE category document to a writer.
|
||||
func (rcd *ROLIECategoryDocument) WriteTo(w io.Writer) (int64, error) {
|
||||
nw := util.NWriter{Writer: w, N: 0}
|
||||
enc := json.NewEncoder(&nw)
|
||||
enc.SetIndent("", " ")
|
||||
err := enc.Encode(rcd)
|
||||
return nw.N, err
|
||||
}
|
||||
|
||||
// Link for ROLIE.
|
||||
type Link struct {
|
||||
Rel string `json:"rel"`
|
||||
|
|
@ -25,7 +146,7 @@ type Link struct {
|
|||
|
||||
// ROLIECategory for ROLIE.
|
||||
type ROLIECategory struct {
|
||||
Scheme string `json:"scheme"`
|
||||
Scheme string `json:"scheme,omitempty"`
|
||||
Term string `json:"term"`
|
||||
}
|
||||
|
||||
|
|
@ -48,14 +169,22 @@ type Format struct {
|
|||
|
||||
// Entry for ROLIE.
|
||||
type Entry struct {
|
||||
Base *string `json:"base,omitempty"`
|
||||
LanguageTag *string `json:"lang,omitempty"`
|
||||
Author *json.RawMessage `json:"author,omitempty"`
|
||||
Category []ROLIECategory `json:"category,omitempty"`
|
||||
Content Content `json:"content"`
|
||||
Contributor *json.RawMessage `json:"contributor,omitempty"`
|
||||
ID string `json:"id"`
|
||||
Titel string `json:"title"`
|
||||
Link []Link `json:"link"`
|
||||
Published TimeStamp `json:"published"`
|
||||
Updated TimeStamp `json:"updated"`
|
||||
Rights *json.RawMessage `json:"rights,omitempty"`
|
||||
Source *json.RawMessage `json:"source,omitempty"`
|
||||
Summary *Summary `json:"summary,omitempty"`
|
||||
Content Content `json:"content"`
|
||||
Titel string `json:"title"`
|
||||
Updated TimeStamp `json:"updated"`
|
||||
Format Format `json:"format"`
|
||||
Property *json.RawMessage `json:"property,omitempty"`
|
||||
}
|
||||
|
||||
// FeedData is the content of the ROLIE feed.
|
||||
|
|
@ -65,7 +194,7 @@ type FeedData struct {
|
|||
Link []Link `json:"link,omitempty"`
|
||||
Category []ROLIECategory `json:"category,omitempty"`
|
||||
Updated TimeStamp `json:"updated"`
|
||||
Entry []*Entry `json:"entry,omitempty"`
|
||||
Entry []*Entry `json:"entry"`
|
||||
}
|
||||
|
||||
// ROLIEFeed is a ROLIE feed.
|
||||
|
|
@ -75,9 +204,8 @@ type ROLIEFeed struct {
|
|||
|
||||
// LoadROLIEFeed loads a ROLIE feed from a reader.
|
||||
func LoadROLIEFeed(r io.Reader) (*ROLIEFeed, error) {
|
||||
dec := json.NewDecoder(r)
|
||||
var rf ROLIEFeed
|
||||
if err := dec.Decode(&rf); err != nil {
|
||||
if err := misc.StrictJSONParse(r, &rf); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &rf, nil
|
||||
|
|
@ -103,15 +231,11 @@ func (rf *ROLIEFeed) EntryByID(id string) *Entry {
|
|||
return nil
|
||||
}
|
||||
|
||||
// Files extracts the files from the feed.
|
||||
func (rf *ROLIEFeed) Files() []string {
|
||||
var files []string
|
||||
for _, f := range rf.Feed.Entry {
|
||||
for i := range f.Link {
|
||||
files = append(files, f.Link[i].HRef)
|
||||
// Entries visits the entries of this feed.
|
||||
func (rf *ROLIEFeed) Entries(fn func(*Entry)) {
|
||||
for _, e := range rf.Feed.Entry {
|
||||
fn(e)
|
||||
}
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
// SortEntriesByUpdated sorts all the entries in the feed
|
||||
|
|
@ -122,3 +246,8 @@ func (rf *ROLIEFeed) SortEntriesByUpdated() {
|
|||
return time.Time(entries[j].Updated).Before(time.Time(entries[i].Updated))
|
||||
})
|
||||
}
|
||||
|
||||
// CountEntries returns the number of entries within the feed
|
||||
func (rf *ROLIEFeed) CountEntries() int {
|
||||
return len(rf.Feed.Entry)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,7 +20,10 @@
|
|||
"title": "Link",
|
||||
"description": "Specifies the JSON link.",
|
||||
"type": "object",
|
||||
"required": ["rel", "href"],
|
||||
"required": [
|
||||
"rel",
|
||||
"href"
|
||||
],
|
||||
"properties": {
|
||||
"href": {
|
||||
"title": "Hyper reference",
|
||||
|
|
@ -31,7 +34,9 @@
|
|||
"title": "Relationship",
|
||||
"description": "Contains the relationship value of the link.",
|
||||
"type": "string",
|
||||
"enum": ["self"]
|
||||
"enum": [
|
||||
"self"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -42,7 +47,10 @@
|
|||
"title": "Link",
|
||||
"description": "Specifies a single link.",
|
||||
"type": "object",
|
||||
"required": ["rel", "href"],
|
||||
"required": [
|
||||
"rel",
|
||||
"href"
|
||||
],
|
||||
"properties": {
|
||||
"href": {
|
||||
"title": "Hyper reference",
|
||||
|
|
@ -61,13 +69,22 @@
|
|||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["feed"],
|
||||
"required": [
|
||||
"feed"
|
||||
],
|
||||
"properties": {
|
||||
"feed": {
|
||||
"title": "CSAF ROLIE feed",
|
||||
"description": "Contains all information of the feed.",
|
||||
"type": "object",
|
||||
"required": ["id", "title", "link", "category", "updated", "entry"],
|
||||
"required": [
|
||||
"id",
|
||||
"title",
|
||||
"link",
|
||||
"category",
|
||||
"updated",
|
||||
"entry"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "ID",
|
||||
|
|
@ -96,19 +113,26 @@
|
|||
"title": "CSAF ROLIE category",
|
||||
"description": "Contains the required ROLIE category value.",
|
||||
"type": "object",
|
||||
"required": ["scheme", "term"],
|
||||
"required": [
|
||||
"scheme",
|
||||
"term"
|
||||
],
|
||||
"properties": {
|
||||
"scheme": {
|
||||
"title": "Scheme",
|
||||
"description": "Contains the URI of the scheme to use.",
|
||||
"type": "string",
|
||||
"enum": ["urn:ietf:params:rolie:category:information-type"]
|
||||
"enum": [
|
||||
"urn:ietf:params:rolie:category:information-type"
|
||||
]
|
||||
},
|
||||
"term": {
|
||||
"title": "Term",
|
||||
"description": "Contains the term that is valid in the context of the scheme.",
|
||||
"type": "string",
|
||||
"enum": ["csaf"]
|
||||
"enum": [
|
||||
"csaf"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -119,7 +143,10 @@
|
|||
"title": "Category",
|
||||
"description": "Specifies a single category.",
|
||||
"type": "object",
|
||||
"required": ["scheme", "term"],
|
||||
"required": [
|
||||
"scheme",
|
||||
"term"
|
||||
],
|
||||
"properties": {
|
||||
"scheme": {
|
||||
"title": "Scheme",
|
||||
|
|
@ -146,7 +173,6 @@
|
|||
"title": "List of Entries",
|
||||
"description": "Contains a list of feed entries.",
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true,
|
||||
"items": {
|
||||
"title": "Entry",
|
||||
|
|
@ -193,13 +219,13 @@
|
|||
"format": "date-time"
|
||||
},
|
||||
"summary": {
|
||||
"title": "",
|
||||
"description": "",
|
||||
"title": "Summary",
|
||||
"description": "Contains the summary of the CSAF document.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"content": {
|
||||
"title": "",
|
||||
"description": "",
|
||||
"title": "Content",
|
||||
"description": "Contains the actual text of the summary.",
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
}
|
||||
|
|
@ -209,7 +235,10 @@
|
|||
"title": "Content of the entry",
|
||||
"description": "Contains information about the content.",
|
||||
"type": "object",
|
||||
"required": ["type", "src"],
|
||||
"required": [
|
||||
"type",
|
||||
"src"
|
||||
],
|
||||
"properties": {
|
||||
"src": {
|
||||
"title": "Source Code",
|
||||
|
|
@ -220,15 +249,20 @@
|
|||
"title": "MIME type",
|
||||
"description": "Contains the MIME type of the content.",
|
||||
"type": "string",
|
||||
"enum": ["application/json"]
|
||||
"enum": [
|
||||
"application/json"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"format": {
|
||||
"title": "",
|
||||
"description": "",
|
||||
"title": "Format",
|
||||
"description": "Contains information about the format of the entry.",
|
||||
"type": "object",
|
||||
"required": ["schema", "version"],
|
||||
"required": [
|
||||
"schema",
|
||||
"version"
|
||||
],
|
||||
"properties": {
|
||||
"schema": {
|
||||
"title": "Schema of the entry",
|
||||
|
|
@ -242,7 +276,9 @@
|
|||
"title": "CSAF Version",
|
||||
"description": "Contains the CSAF version the document was written in.",
|
||||
"type": "string",
|
||||
"enum": ["2.0"]
|
||||
"enum": [
|
||||
"2.0"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -175,7 +175,7 @@
|
|||
"type": "object",
|
||||
"required": [
|
||||
"metadata",
|
||||
"mirror",
|
||||
"mirrors",
|
||||
"update_interval"
|
||||
],
|
||||
"properties": {
|
||||
|
|
|
|||
2
csaf/schema/cvss-v2.0.json.license
Normal file
2
csaf/schema/cvss-v2.0.json.license
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
SPDX-License-Identifier: BSD-3-Clause
|
||||
SPDX-FileCopyrightText: 2017 FIRST.ORG, INC.
|
||||
|
|
@ -108,7 +108,7 @@
|
|||
},
|
||||
"vectorString": {
|
||||
"type": "string",
|
||||
"pattern": "^CVSS:3[.]0/((AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XUNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XUNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$"
|
||||
"pattern": "^CVSS:3[.]0/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$"
|
||||
},
|
||||
"attackVector": { "$ref": "#/definitions/attackVectorType" },
|
||||
"attackComplexity": { "$ref": "#/definitions/attackComplexityType" },
|
||||
|
|
|
|||
2
csaf/schema/cvss-v3.0.json.license
Normal file
2
csaf/schema/cvss-v3.0.json.license
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
SPDX-License-Identifier: BSD-3-Clause
|
||||
SPDX-FileCopyrightText: 2017 FIRST.ORG, INC.
|
||||
2
csaf/schema/cvss-v3.1.json.license
Normal file
2
csaf/schema/cvss-v3.1.json.license
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
SPDX-License-Identifier: BSD-3-Clause
|
||||
SPDX-FileCopyrightText: 2021 FIRST.ORG, INC.
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -11,7 +11,7 @@ package csaf
|
|||
import (
|
||||
"time"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
"github.com/gocsaf/csaf/v3/util"
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
@ -41,7 +41,7 @@ type AdvisorySummary struct {
|
|||
// with the help of an expression evaluator expr.
|
||||
func NewAdvisorySummary(
|
||||
pe *util.PathEval,
|
||||
doc interface{},
|
||||
doc any,
|
||||
) (*AdvisorySummary, error) {
|
||||
|
||||
e := &AdvisorySummary{
|
||||
|
|
|
|||
280
csaf/util.go
280
csaf/util.go
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -10,223 +10,10 @@ package csaf
|
|||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/csaf-poc/csaf_distribution/util"
|
||||
)
|
||||
|
||||
// LoadedProviderMetadata represents a loaded provider metadata.
|
||||
type LoadedProviderMetadata struct {
|
||||
// URL is location where the document was found.
|
||||
URL string
|
||||
// Document is the de-serialized JSON document.
|
||||
Document interface{}
|
||||
// Hash is a SHA256 sum over the document.
|
||||
Hash []byte
|
||||
// Messages are the error message happened while loading.
|
||||
Messages []string
|
||||
}
|
||||
|
||||
// LoadProviderMetadataFromURL loads a provider metadata from a given URL.
|
||||
// Returns nil if the document was not found.
|
||||
func LoadProviderMetadataFromURL(client util.Client, url string) *LoadedProviderMetadata {
|
||||
|
||||
res, err := client.Get(url)
|
||||
|
||||
if err != nil || res.StatusCode != http.StatusOK {
|
||||
// Treat as not found.
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Check for application/json and log it.
|
||||
|
||||
defer res.Body.Close()
|
||||
|
||||
// Calculate checksum for later comparison.
|
||||
hash := sha256.New()
|
||||
|
||||
result := LoadedProviderMetadata{URL: url}
|
||||
|
||||
tee := io.TeeReader(res.Body, hash)
|
||||
|
||||
if err := json.NewDecoder(tee).Decode(&result.Document); err != nil {
|
||||
result.Messages = []string{fmt.Sprintf("%s: Decoding JSON failed: %v", url, err)}
|
||||
return &result
|
||||
}
|
||||
|
||||
result.Hash = hash.Sum(nil)
|
||||
|
||||
errors, err := ValidateProviderMetadata(result.Document)
|
||||
if err != nil {
|
||||
result.Messages = []string{
|
||||
fmt.Sprintf("%s: Validating against JSON schema failed: %v", url, err)}
|
||||
return &result
|
||||
}
|
||||
|
||||
if len(errors) > 0 {
|
||||
result.Messages = []string{
|
||||
fmt.Sprintf("%s: Validating against JSON schema failed: %v", url, err)}
|
||||
for _, msg := range errors {
|
||||
result.Messages = append(result.Messages, strings.ReplaceAll(msg, `%`, `%%`))
|
||||
}
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
||||
|
||||
// LoadProviderMetadatasFromSecurity loads a secturity.txt,
|
||||
// extracts and the CSAF urls from the document.
|
||||
// Returns nil if no url was successfully found.
|
||||
func LoadProviderMetadatasFromSecurity(client util.Client, path string) []*LoadedProviderMetadata {
|
||||
|
||||
res, err := client.Get(path)
|
||||
|
||||
if err != nil || res.StatusCode != http.StatusOK {
|
||||
// Treat as not found.
|
||||
return nil
|
||||
}
|
||||
|
||||
// Extract all potential URLs from CSAF.
|
||||
urls, err := func() ([]string, error) {
|
||||
defer res.Body.Close()
|
||||
return ExtractProviderURL(res.Body, true)
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
// Treat as not found
|
||||
return nil
|
||||
}
|
||||
|
||||
var results []*LoadedProviderMetadata
|
||||
|
||||
// Load the URLs
|
||||
for _, url := range urls {
|
||||
if result := LoadProviderMetadataFromURL(client, url); result != nil {
|
||||
results = append(results, result)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// LoadProviderMetadataForDomain loads a provider metadata for a given domain.
|
||||
// Returns nil if no provider metadata was found.
|
||||
// The logging can be use to track the errors happening while loading.
|
||||
func LoadProviderMetadataForDomain(
|
||||
client util.Client,
|
||||
domain string,
|
||||
logging func(format string, args ...interface{}),
|
||||
) *LoadedProviderMetadata {
|
||||
|
||||
if logging == nil {
|
||||
logging = func(format string, args ...interface{}) {
|
||||
log.Printf("FindProviderMetadata: "+format+"\n", args...)
|
||||
}
|
||||
}
|
||||
|
||||
// Valid provider metadata under well-known.
|
||||
var wellknownGood *LoadedProviderMetadata
|
||||
|
||||
// First try well-know path
|
||||
wellknownURL := "https://" + domain + "/.well-known/csaf/provider-metadata.json"
|
||||
wellknownResult := LoadProviderMetadataFromURL(client, wellknownURL)
|
||||
|
||||
if wellknownResult == nil {
|
||||
logging("%s not found.", wellknownURL)
|
||||
} else if len(wellknownResult.Messages) > 0 {
|
||||
// There are issues
|
||||
for _, msg := range wellknownResult.Messages {
|
||||
logging(msg)
|
||||
}
|
||||
} else {
|
||||
// We have a candidate.
|
||||
wellknownGood = wellknownResult
|
||||
}
|
||||
|
||||
// Next load the PMDs from security.txt
|
||||
secURL := "https://" + domain + "/.well-known/security.txt"
|
||||
secResults := LoadProviderMetadatasFromSecurity(client, secURL)
|
||||
|
||||
if secResults == nil {
|
||||
logging("%s failed to load.", secURL)
|
||||
} else {
|
||||
// Filter out the results which are valid.
|
||||
var secGoods []*LoadedProviderMetadata
|
||||
|
||||
for _, result := range secResults {
|
||||
if len(result.Messages) > 0 {
|
||||
for _, msg := range result.Messages {
|
||||
logging(msg)
|
||||
}
|
||||
} else {
|
||||
secGoods = append(secGoods, result)
|
||||
}
|
||||
}
|
||||
|
||||
// security.txt contains good entries.
|
||||
if len(secGoods) > 0 {
|
||||
// we have a wellknown good take it.
|
||||
if wellknownGood != nil {
|
||||
// check if first of security urls is identical to wellknown.
|
||||
if bytes.Equal(wellknownGood.Hash, secGoods[0].Hash) {
|
||||
// Mention extra CSAF entries
|
||||
for _, extra := range secGoods[1:] {
|
||||
logging("Ignoring extra CSAF entry in security.txt: %s", extra.URL)
|
||||
}
|
||||
} else {
|
||||
// Complaint about not matching.
|
||||
logging("First entry of security.txt and well-known don't match.")
|
||||
// List all the security urls.
|
||||
for _, sec := range secGoods {
|
||||
logging("Ignoring CSAF entry in security.txt: %s", sec.URL)
|
||||
}
|
||||
}
|
||||
// Take the good well-known.
|
||||
return wellknownGood
|
||||
}
|
||||
|
||||
// Don't have well-known. Take first good from security.txt.
|
||||
// Mention extra CSAF entries
|
||||
for _, extra := range secGoods[1:] {
|
||||
logging("Ignoring extra CSAF entry in security.txt: %s", extra.URL)
|
||||
}
|
||||
|
||||
return secGoods[0]
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a good well-known take it.
|
||||
if wellknownGood != nil {
|
||||
return wellknownGood
|
||||
}
|
||||
|
||||
// Last resort fall back to DNS.
|
||||
|
||||
dnsURL := "https://csaf.data.security." + domain
|
||||
dnsResult := LoadProviderMetadataFromURL(client, dnsURL)
|
||||
|
||||
if dnsResult == nil {
|
||||
logging("%s not found.", dnsURL)
|
||||
} else if len(dnsResult.Messages) > 0 {
|
||||
for _, msg := range dnsResult.Messages {
|
||||
logging(msg)
|
||||
}
|
||||
} else {
|
||||
// DNS seems to be okay.
|
||||
return dnsResult
|
||||
}
|
||||
|
||||
// We failed all.
|
||||
return nil
|
||||
}
|
||||
|
||||
// ExtractProviderURL extracts URLs of provider metadata.
|
||||
// If all is true all URLs are returned. Otherwise only the first is returned.
|
||||
func ExtractProviderURL(r io.Reader, all bool) ([]string, error) {
|
||||
|
|
@ -249,3 +36,64 @@ func ExtractProviderURL(r io.Reader, all bool) ([]string, error) {
|
|||
}
|
||||
return urls, nil
|
||||
}
|
||||
|
||||
// CollectProductIdentificationHelpers returns a slice of all ProductIdentificationHelper
|
||||
// for a given ProductID.
|
||||
func (pt *ProductTree) CollectProductIdentificationHelpers(id ProductID) []*ProductIdentificationHelper {
|
||||
var helpers []*ProductIdentificationHelper
|
||||
pt.FindProductIdentificationHelpers(
|
||||
id, func(helper *ProductIdentificationHelper) {
|
||||
helpers = append(helpers, helper)
|
||||
})
|
||||
return helpers
|
||||
}
|
||||
|
||||
// FindProductIdentificationHelpers calls visit on all ProductIdentificationHelper
|
||||
// for a given ProductID by iterating over all full product names and branches
|
||||
// recursively available in the ProductTree.
|
||||
func (pt *ProductTree) FindProductIdentificationHelpers(
|
||||
id ProductID,
|
||||
visit func(*ProductIdentificationHelper),
|
||||
) {
|
||||
// Iterate over all full product names
|
||||
if fpns := pt.FullProductNames; fpns != nil {
|
||||
for _, fpn := range *fpns {
|
||||
if fpn != nil &&
|
||||
fpn.ProductID != nil && *fpn.ProductID == id &&
|
||||
fpn.ProductIdentificationHelper != nil {
|
||||
visit(fpn.ProductIdentificationHelper)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate over branches recursively
|
||||
var recBranch func(b *Branch)
|
||||
recBranch = func(b *Branch) {
|
||||
if b == nil {
|
||||
return
|
||||
}
|
||||
if fpn := b.Product; fpn != nil &&
|
||||
fpn.ProductID != nil && *fpn.ProductID == id &&
|
||||
fpn.ProductIdentificationHelper != nil {
|
||||
visit(fpn.ProductIdentificationHelper)
|
||||
}
|
||||
for _, c := range b.Branches {
|
||||
recBranch(c)
|
||||
}
|
||||
}
|
||||
for _, b := range pt.Branches {
|
||||
recBranch(b)
|
||||
}
|
||||
|
||||
// Iterate over relationships
|
||||
if rels := pt.RelationShips; rels != nil {
|
||||
for _, rel := range *rels {
|
||||
if rel != nil {
|
||||
if fpn := rel.FullProductName; fpn != nil && fpn.ProductID != nil &&
|
||||
*fpn.ProductID == id && fpn.ProductIdentificationHelper != nil {
|
||||
visit(fpn.ProductIdentificationHelper)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
182
csaf/util_test.go
Normal file
182
csaf/util_test.go
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
||||
package csaf
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestProductTree_FindProductIdentificationHelpers(t *testing.T) {
|
||||
type fields struct {
|
||||
Branches Branches
|
||||
FullProductNames *FullProductNames
|
||||
RelationShips *Relationships
|
||||
}
|
||||
type args struct {
|
||||
id ProductID
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
fields fields
|
||||
args args
|
||||
want []*ProductIdentificationHelper
|
||||
}{
|
||||
{
|
||||
name: "empty product tree",
|
||||
args: args{
|
||||
id: "CSAFPID-0001",
|
||||
},
|
||||
want: nil,
|
||||
},
|
||||
{
|
||||
name: "product tree with matching full product names",
|
||||
fields: fields{
|
||||
FullProductNames: &FullProductNames{{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
}},
|
||||
},
|
||||
args: args{
|
||||
id: "CSAFPID-0001",
|
||||
},
|
||||
want: []*ProductIdentificationHelper{{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "product tree with no matching full product names",
|
||||
fields: fields{
|
||||
FullProductNames: &FullProductNames{{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
}},
|
||||
},
|
||||
args: args{
|
||||
id: "CSAFPID-0002",
|
||||
},
|
||||
want: nil,
|
||||
},
|
||||
{
|
||||
name: "product tree with matching branches",
|
||||
fields: fields{
|
||||
Branches: Branches{{
|
||||
Name: &[]string{"beta"}[0],
|
||||
Product: &FullProductName{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
},
|
||||
Branches: Branches{{
|
||||
Name: &[]string{"beta-2"}[0],
|
||||
Product: &FullProductName{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
args: args{
|
||||
id: "CSAFPID-0001",
|
||||
},
|
||||
want: []*ProductIdentificationHelper{{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
}, {
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0],
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "product tree with no matching branches",
|
||||
fields: fields{
|
||||
Branches: Branches{{
|
||||
Name: &[]string{"beta"}[0],
|
||||
Product: &FullProductName{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
},
|
||||
Branches: Branches{{
|
||||
Name: &[]string{"beta-2"}[0],
|
||||
Product: &FullProductName{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta-2:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
args: args{
|
||||
id: "CSAFPID-0002",
|
||||
},
|
||||
want: nil,
|
||||
},
|
||||
{
|
||||
name: "product tree with matching relationships",
|
||||
fields: fields{
|
||||
RelationShips: &Relationships{{
|
||||
FullProductName: &FullProductName{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
},
|
||||
}},
|
||||
},
|
||||
args: args{
|
||||
id: "CSAFPID-0001",
|
||||
},
|
||||
want: []*ProductIdentificationHelper{{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "product tree with no matching relationships",
|
||||
fields: fields{
|
||||
RelationShips: &Relationships{{
|
||||
FullProductName: &FullProductName{
|
||||
ProductID: &[]ProductID{"CSAFPID-0001"}[0],
|
||||
ProductIdentificationHelper: &ProductIdentificationHelper{
|
||||
CPE: &[]CPE{"cpe:2.3:a:microsoft:internet_explorer:1.0.0:beta:*:*:*:*:*:*"}[0],
|
||||
},
|
||||
},
|
||||
}},
|
||||
},
|
||||
args: args{
|
||||
id: "CSAFPID-0002",
|
||||
},
|
||||
want: nil,
|
||||
},
|
||||
}
|
||||
|
||||
t.Parallel()
|
||||
for _, testToRun := range tests {
|
||||
test := testToRun
|
||||
t.Run(test.name, func(tt *testing.T) {
|
||||
tt.Parallel()
|
||||
pt := &ProductTree{
|
||||
Branches: test.fields.Branches,
|
||||
FullProductNames: test.fields.FullProductNames,
|
||||
RelationShips: test.fields.RelationShips,
|
||||
}
|
||||
if got := pt.CollectProductIdentificationHelpers(test.args.id); !reflect.DeepEqual(got, test.want) {
|
||||
tt.Errorf("ProductTree.FindProductIdentificationHelpers() = %v, want %v",
|
||||
got, test.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// This file is Free Software under the MIT License
|
||||
// without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
// This file is Free Software under the Apache-2.0 License
|
||||
// without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
//
|
||||
// SPDX-License-Identifier: MIT
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
//
|
||||
// SPDX-FileCopyrightText: 2021 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
// Software-Engineering: 2021 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -10,12 +10,17 @@ package csaf
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
_ "embed" // Used for embedding.
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v5"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
//go:embed schema/csaf_json_schema.json
|
||||
|
|
@ -39,63 +44,92 @@ var aggregatorSchema []byte
|
|||
//go:embed schema/ROLIE_feed_json_schema.json
|
||||
var rolieSchema []byte
|
||||
|
||||
var (
|
||||
compiledCSAFSchema compiledSchema
|
||||
compiledProviderSchema compiledSchema
|
||||
compiledAggregatorSchema compiledSchema
|
||||
compiledRolieSchema compiledSchema
|
||||
)
|
||||
|
||||
func init() {
|
||||
compiledCSAFSchema.compiler([]schemaData{
|
||||
{"https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", csafSchema},
|
||||
{"https://www.first.org/cvss/cvss-v2.0.json", cvss20},
|
||||
{"https://www.first.org/cvss/cvss-v3.0.json", cvss30},
|
||||
{"https://www.first.org/cvss/cvss-v3.1.json", cvss31},
|
||||
})
|
||||
compiledProviderSchema.compiler([]schemaData{
|
||||
{"https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json", providerSchema},
|
||||
{"https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", csafSchema},
|
||||
})
|
||||
compiledAggregatorSchema.compiler([]schemaData{
|
||||
{"https://docs.oasis-open.org/csaf/csaf/v2.0/aggregator_json_schema.json", aggregatorSchema},
|
||||
{"https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json", providerSchema},
|
||||
{"https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json", csafSchema},
|
||||
})
|
||||
compiledRolieSchema.compiler([]schemaData{
|
||||
{"https://raw.githubusercontent.com/tschmidtb51/csaf/ROLIE-schema/csaf_2.0/json_schema/ROLIE_feed_json_schema.json", rolieSchema},
|
||||
})
|
||||
}
|
||||
|
||||
type schemaData struct {
|
||||
url string
|
||||
data []byte
|
||||
}
|
||||
|
||||
type compiledSchema struct {
|
||||
url string
|
||||
once sync.Once
|
||||
compile func()
|
||||
err error
|
||||
compiled *jsonschema.Schema
|
||||
}
|
||||
|
||||
func (cs *compiledSchema) compiler(sds []schemaData) {
|
||||
if len(sds) == 0 {
|
||||
panic("missing schema data")
|
||||
const (
|
||||
csafSchemaURL = "https://docs.oasis-open.org/csaf/csaf/v2.0/csaf_json_schema.json"
|
||||
providerSchemaURL = "https://docs.oasis-open.org/csaf/csaf/v2.0/provider_json_schema.json"
|
||||
aggregatorSchemaURL = "https://docs.oasis-open.org/csaf/csaf/v2.0/aggregator_json_schema.json"
|
||||
cvss20SchemaURL = "https://www.first.org/cvss/cvss-v2.0.json"
|
||||
cvss30SchemaURL = "https://www.first.org/cvss/cvss-v3.0.json"
|
||||
cvss31SchemaURL = "https://www.first.org/cvss/cvss-v3.1.json"
|
||||
rolieSchemaURL = "https://raw.githubusercontent.com/tschmidtb51/csaf/ROLIE-schema/csaf_2.0/json_schema/ROLIE_feed_json_schema.json"
|
||||
)
|
||||
|
||||
var (
|
||||
compiledCSAFSchema = compiledSchema{url: csafSchemaURL}
|
||||
compiledProviderSchema = compiledSchema{url: providerSchemaURL}
|
||||
compiledAggregatorSchema = compiledSchema{url: aggregatorSchemaURL}
|
||||
compiledRolieSchema = compiledSchema{url: rolieSchemaURL}
|
||||
)
|
||||
|
||||
type schemaLoader http.Client
|
||||
|
||||
func (l *schemaLoader) loadHTTPURL(url string) (any, error) {
|
||||
client := (*http.Client)(l)
|
||||
resp, err := client.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cs.compile = func() {
|
||||
c := jsonschema.NewCompiler()
|
||||
for _, s := range sds {
|
||||
if cs.err = c.AddResource(
|
||||
s.url, bytes.NewReader(s.data)); cs.err != nil {
|
||||
return
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode)
|
||||
}
|
||||
|
||||
return jsonschema.UnmarshalJSON(resp.Body)
|
||||
}
|
||||
|
||||
// Load loads the schema from the specified url.
|
||||
func (l *schemaLoader) Load(url string) (any, error) {
|
||||
loader := func(data []byte) (any, error) {
|
||||
return jsonschema.UnmarshalJSON(bytes.NewReader(data))
|
||||
}
|
||||
cs.compiled, cs.err = c.Compile(sds[0].url)
|
||||
switch url {
|
||||
case csafSchemaURL:
|
||||
return loader(csafSchema)
|
||||
case cvss20SchemaURL:
|
||||
return loader(cvss20)
|
||||
case cvss30SchemaURL:
|
||||
return loader(cvss30)
|
||||
case cvss31SchemaURL:
|
||||
return loader(cvss31)
|
||||
case providerSchemaURL:
|
||||
return loader(providerSchema)
|
||||
case aggregatorSchemaURL:
|
||||
return loader(aggregatorSchema)
|
||||
case rolieSchemaURL:
|
||||
return loader(rolieSchema)
|
||||
default:
|
||||
// Fallback to http loader
|
||||
return l.loadHTTPURL(url)
|
||||
}
|
||||
}
|
||||
|
||||
func (cs *compiledSchema) validate(doc interface{}) ([]string, error) {
|
||||
func newSchemaLoader(insecure bool) *schemaLoader {
|
||||
httpLoader := schemaLoader(http.Client{
|
||||
Timeout: 15 * time.Second,
|
||||
})
|
||||
if insecure {
|
||||
httpLoader.Transport = &http.Transport{
|
||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
|
||||
}
|
||||
}
|
||||
return &httpLoader
|
||||
}
|
||||
|
||||
func (cs *compiledSchema) compile() {
|
||||
c := jsonschema.NewCompiler()
|
||||
c.AssertFormat()
|
||||
c.UseLoader(newSchemaLoader(false))
|
||||
cs.compiled, cs.err = c.Compile(cs.url)
|
||||
}
|
||||
|
||||
func (cs *compiledSchema) validate(doc any) ([]string, error) {
|
||||
cs.once.Do(cs.compile)
|
||||
|
||||
if cs.err != nil {
|
||||
|
|
@ -107,7 +141,8 @@ func (cs *compiledSchema) validate(doc interface{}) ([]string, error) {
|
|||
return nil, nil
|
||||
}
|
||||
|
||||
valErr, ok := err.(*jsonschema.ValidationError)
|
||||
var valErr *jsonschema.ValidationError
|
||||
ok := errors.As(err, &valErr)
|
||||
if !ok {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -131,21 +166,21 @@ func (cs *compiledSchema) validate(doc interface{}) ([]string, error) {
|
|||
if pi != pj {
|
||||
return pi < pj
|
||||
}
|
||||
return errs[i].Error < errs[j].Error
|
||||
return errs[i].Error.String() < errs[j].Error.String()
|
||||
})
|
||||
|
||||
res := make([]string, 0, len(errs))
|
||||
|
||||
for i := range errs {
|
||||
e := &errs[i]
|
||||
if e.Error == "" {
|
||||
if e.Error == nil {
|
||||
continue
|
||||
}
|
||||
loc := e.InstanceLocation
|
||||
if loc == "" {
|
||||
loc = e.AbsoluteKeywordLocation
|
||||
}
|
||||
res = append(res, loc+": "+e.Error)
|
||||
res = append(res, loc+": "+e.Error.String())
|
||||
}
|
||||
|
||||
return res, nil
|
||||
|
|
@ -153,24 +188,24 @@ func (cs *compiledSchema) validate(doc interface{}) ([]string, error) {
|
|||
|
||||
// ValidateCSAF validates the document doc against the JSON schema
|
||||
// of CSAF.
|
||||
func ValidateCSAF(doc interface{}) ([]string, error) {
|
||||
func ValidateCSAF(doc any) ([]string, error) {
|
||||
return compiledCSAFSchema.validate(doc)
|
||||
}
|
||||
|
||||
// ValidateProviderMetadata validates the document doc against the JSON schema
|
||||
// of provider metadata.
|
||||
func ValidateProviderMetadata(doc interface{}) ([]string, error) {
|
||||
func ValidateProviderMetadata(doc any) ([]string, error) {
|
||||
return compiledProviderSchema.validate(doc)
|
||||
}
|
||||
|
||||
// ValidateAggregator validates the document doc against the JSON schema
|
||||
// of aggregator.
|
||||
func ValidateAggregator(doc interface{}) ([]string, error) {
|
||||
func ValidateAggregator(doc any) ([]string, error) {
|
||||
return compiledAggregatorSchema.validate(doc)
|
||||
}
|
||||
|
||||
// ValidateROLIE validates the ROLIE feed against the JSON schema
|
||||
// of ROLIE
|
||||
func ValidateROLIE(doc interface{}) ([]string, error) {
|
||||
func ValidateROLIE(doc any) ([]string, error) {
|
||||
return compiledRolieSchema.validate(doc)
|
||||
}
|
||||
|
|
|
|||
23
docs/Development.md
Normal file
23
docs/Development.md
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# Development
|
||||
|
||||
## Supported Go versions
|
||||
|
||||
We support the latest version and the one before
|
||||
the latest version of Go (currently 1.24 and 1.25).
|
||||
|
||||
## Generated files
|
||||
|
||||
Some source code files are machine generated. At the moment these are only
|
||||
[cvss20enums.go](../csaf/cvss20enums.go) and [cvss3enums.go](../csaf/cvss3enums.go) on the
|
||||
basis of the [Advisory JSON schema](../csaf/schema/csaf_json_schema.json).
|
||||
|
||||
If you change the source files please regenerate the generated files
|
||||
with `go generate ./...` in the root folder and add the updated files
|
||||
to the version control.
|
||||
|
||||
If you plan to add further machine generated files ensure that they
|
||||
are marked with comments like
|
||||
```
|
||||
// THIS FILE IS MACHINE GENERATED. EDIT WITH CARE!
|
||||
```
|
||||
.
|
||||
|
|
@ -6,7 +6,7 @@ a web browser.
|
|||
### Configure nginx
|
||||
Assuming the relevant server block is in `/etc/nginx/sites-enabled/default` and the CA used to verify the client certificates is under `/etc/ssl/`,
|
||||
adjust the content of the `server{}` block like shown in the following example:
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/TLSClientConfigsForITest.sh&lines=25-40) -->
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/TLSClientConfigsForITest.sh&lines=25-38) -->
|
||||
<!-- The below code snippet is automatically added from ../docs/scripts/TLSClientConfigsForITest.sh -->
|
||||
```sh
|
||||
ssl_client_certificate '${SSL_CLIENT_CERTIFICATE}'; # e.g. ssl_client_certificate /etc/ssl/rootca-cert.pem;
|
||||
|
|
@ -20,9 +20,7 @@ adjust the content of the `server{}` block like shown in the following example:
|
|||
autoindex on;
|
||||
# in this location access is only allowed with client certs
|
||||
if ($ssl_client_verify != SUCCESS){
|
||||
# we use status code 404 == "Not Found", because we do not
|
||||
# want to reveal if this location exists or not.
|
||||
return 404;
|
||||
return 403;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
|
|
|||
|
|
@ -6,22 +6,35 @@
|
|||
csaf_aggregator [OPTIONS]
|
||||
|
||||
Application Options:
|
||||
-c, --config=CFG-FILE File name of the configuration file (default:
|
||||
aggregator.toml)
|
||||
--version Display version of the binary
|
||||
-t, --time_range=RANGE RANGE of time from which advisories to download
|
||||
-i, --interim Perform an interim scan
|
||||
--version Display version of the binary
|
||||
-c, --config=TOML-FILE Path to config TOML file
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
```
|
||||
|
||||
If no config file is explictly given the follwing places are searched for a config file:
|
||||
|
||||
```
|
||||
~/.config/csaf/aggregator.toml
|
||||
~/.csaf_aggregator.toml
|
||||
csaf_aggregator.toml
|
||||
```
|
||||
|
||||
with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems.
|
||||
|
||||
Usage example for a single run, to test if the config is good:
|
||||
|
||||
```bash
|
||||
./csaf_aggregator -c docs/examples/aggregator.toml
|
||||
```
|
||||
|
||||
Once the config is good, you can run the aggregator periodically
|
||||
in two modes. For instance using `cron` on Ubuntu and after placing
|
||||
in two modes: full and interim.
|
||||
|
||||
Here is a complete example using `cron` on Ubuntu. After placing
|
||||
the config file in `/etc/csaf_aggregator.toml` and making sure
|
||||
its permissions only allow the user `www-data` to read it:
|
||||
|
||||
|
|
@ -40,7 +53,7 @@ crontab -u www-data -l
|
|||
crontab -u www-data -e
|
||||
```
|
||||
|
||||
Crontab example, running the full mode one a day and updating
|
||||
Here is a crontab that runs the full mode once a day and updating
|
||||
interim advisories every 60 minutes:
|
||||
|
||||
```crontab
|
||||
|
|
@ -51,68 +64,144 @@ SHELL=/bin/bash
|
|||
30 0-23 * * * $HOME/bin/csaf_aggregator --config /etc/csaf_aggregator.toml --interim >> /var/log/csaf_aggregator/interim.log 2>&1
|
||||
```
|
||||
|
||||
|
||||
#### serve via web server
|
||||
|
||||
Serve the paths where the aggregator writes its `html/` output
|
||||
by means of a webserver.
|
||||
In the config example below place is configured by the path given for `web`.
|
||||
In the config example below the place in the filesystem
|
||||
is configured by the path given for `web`.
|
||||
|
||||
The user running the aggregator has to be able to write there
|
||||
and the web server must be able to read the files.
|
||||
|
||||
If you are using nginx, the setup instructions for the provider provide
|
||||
and example. You can leave out the cgi-bin part,
|
||||
potentially commend out the TLS client parts and
|
||||
If you are using nginx, the setup instructions for the provider give
|
||||
a template. For the aggregator the difference is that you can leave out
|
||||
the cgi-bin part, potentially commend out the TLS client parts and
|
||||
adjust the `root` path accordingly.
|
||||
|
||||
|
||||
### config options
|
||||
|
||||
The following options can be used in the config file in TOML format:
|
||||
The config file is written in [TOML](https://toml.io/en/v1.0.0).
|
||||
Each _key_ in the following table is optional and
|
||||
can be used directly in the file. If given it overrides the internal default.
|
||||
|
||||
```
|
||||
```go
|
||||
workers // number of parallel workers to start (default 10)
|
||||
folder // target folder on disc for writing the downloaded documents
|
||||
web // directory to be served by the webserver
|
||||
domain // base url where the contents will be reachable from outside
|
||||
rate // overall downloading limit per worker
|
||||
folder // target folder on disc for writing the downloaded documents (default "/var/www")
|
||||
web // directory to be served by the webserver (default "/var/www/html")
|
||||
domain // base url where the contents will be reachable from outside (default "https://example.com")
|
||||
rate // downloading limit per worker in HTTPS req/s (defaults to unlimited)
|
||||
insecure // do not check validity of TLS certificates
|
||||
aggregator // table with basic infos for the aggregator object
|
||||
providers // array of tables, each entry to be mirrored or listed
|
||||
openpgp_private_key // OpenPGP private key
|
||||
write_indices // write index.txt and changes.csv
|
||||
update_interval // to indicate the collection interval for a provider (default ""on best effort")
|
||||
create_service_document // write a service.json to the ROLIE feed docs for a provider (default false)
|
||||
categories // configure ROLIE category values for a provider
|
||||
openpgp_private_key // OpenPGP private key (must have no passphrase set, if
|
||||
// you want to be able to run unattended, e.g. via cron.)
|
||||
openpgp_public_key // OpenPGP public key
|
||||
passphrase // passphrase of the OpenPGP key
|
||||
lock_file // path to lockfile, to stop other instances if one is not done
|
||||
interim_years // limiting the years for which interim documents are searched
|
||||
verbose // print more diagnostic output, e.g. https request
|
||||
allow_single_provider // debugging option
|
||||
lock_file // path to lockfile, to stop other instances if one is not done (default:/var/lock/csaf_aggregator/lock, disable by setting it to "")
|
||||
interim_years // limiting the years for which interim documents are searched (default 0)
|
||||
verbose // print more diagnostic output, e.g. https requests (default false)
|
||||
allow_single_provider // debugging option (default false)
|
||||
ignore_pattern // patterns of advisory URLs to be ignored (see checker doc for details)
|
||||
client_cert // path to client certificate to access access-protected advisories
|
||||
client_key // path to client key to access access-protected advisories
|
||||
client_passphrase // optional client cert passphrase (limited, experimental, see downloader doc)
|
||||
header // adds extra HTTP header fields to the client
|
||||
time_range // Accepted time range of advisories to handle. See downloader docs for details.
|
||||
```
|
||||
|
||||
Rates are specified as floats in HTTPS operations per second.
|
||||
0 means no limit.
|
||||
Next we have two TOML _tables_:
|
||||
|
||||
```
|
||||
aggregator // basic infos for the aggregator object
|
||||
remote_validator // config for optional remote validation checker
|
||||
```
|
||||
|
||||
[See the provider config](csaf_provider.md#provider-options) about
|
||||
how to configure `remote_validator`.
|
||||
|
||||
At last there is the TOML _array of tables_:
|
||||
|
||||
```
|
||||
providers // each entry to be mirrored or listed
|
||||
```
|
||||
|
||||
where at least 2 providers have to be configured.
|
||||
With each _table_ allowing:
|
||||
|
||||
`providers` is an array of tables, each allowing
|
||||
```
|
||||
name
|
||||
domain
|
||||
rate
|
||||
insecure
|
||||
write_indices
|
||||
category
|
||||
update_interval
|
||||
create_service_document
|
||||
categories
|
||||
ignore_pattern
|
||||
client_cert
|
||||
client_key
|
||||
client_passphrase
|
||||
header
|
||||
```
|
||||
|
||||
Where valid `name` and `domain` settings are required.
|
||||
|
||||
If no user agent is specified with `header = "user-agent:custom-agent/1.0"`
|
||||
then the default agent in the form of `csaf_distribution/VERSION` is sent.
|
||||
|
||||
If you want an entry to be listed instead of mirrored
|
||||
in a `aggregator.category == "aggregator"` instance,
|
||||
set `category` to `lister` in the entry.
|
||||
Otherwise it is recommended to not set `category` for entries.
|
||||
|
||||
The remaining _keys_ per entry in the _table_ `providers`
|
||||
are optional and will take precedence instead
|
||||
of the directly given _keys_ in the TOML file and the internal defaults.
|
||||
|
||||
If a provider's `domain` starts with `https://` it is considered a publisher.
|
||||
These publishers are added to the `csaf_publishers` list, which is written
|
||||
to the `aggregator.json`.
|
||||
|
||||
To offer an easy way of assorting CSAF documents by criteria like
|
||||
document category, languages or values of the branch category within
|
||||
the product tree, ROLIE category values can be configured in `categories`.
|
||||
This can either
|
||||
be done using an array of strings taken literally or, by prepending `"expr:"`.
|
||||
The latter is evaluated as JSONPath and the result will be added into the
|
||||
categories document. For a more detailed explanation and examples,
|
||||
[refer to the provider config](csaf_provider.md#provider-options).
|
||||
|
||||
#### Example config file
|
||||
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/examples/aggregator.toml) -->
|
||||
<!-- The below code snippet is automatically added from ../docs/examples/aggregator.toml -->
|
||||
|
||||
```toml
|
||||
workers = 2
|
||||
folder = "/var/csaf_aggregator"
|
||||
lock_file = "/var/csaf_aggregator/run.lock"
|
||||
lock_file = "/var/lock/csaf_aggregator/lock"
|
||||
web = "/var/csaf_aggregator/html"
|
||||
domain = "https://localhost:9443"
|
||||
rate = 10.0
|
||||
insecure = true
|
||||
#openpgp_private_key =
|
||||
#openpgp_public_key =
|
||||
#interim_years =
|
||||
#passphrase =
|
||||
#write_indices = false
|
||||
#time_range =
|
||||
|
||||
# specification requires at least two providers (default),
|
||||
# to override for testing, enable:
|
||||
# allow_single_provider = true
|
||||
|
||||
[aggregator]
|
||||
# Set if this instance shall be a mirror (aka `aggregator`) or a `lister`.
|
||||
# This determines the default value for the entries in [[provider]].
|
||||
category = "aggregator"
|
||||
name = "Example Development CSAF Aggregator"
|
||||
contact_details = "some @ somewhere"
|
||||
|
|
@ -122,20 +211,45 @@ insecure = true
|
|||
[[providers]]
|
||||
name = "local-dev-provider"
|
||||
domain = "localhost"
|
||||
categories = ["Example Company Product A", "expr:document.lang"]
|
||||
create_service_document = true
|
||||
# rate = 1.5
|
||||
# insecure = true
|
||||
# time_range =
|
||||
|
||||
[[providers]]
|
||||
name = "local-dev-provider2"
|
||||
domain = "localhost"
|
||||
domain = "https://localhost:8443/.well-known/csaf/provider-metadata.json"
|
||||
# rate = 1.2
|
||||
# insecure = true
|
||||
write_indices = true
|
||||
client_cert = "./../devca1/testclient1.crt"
|
||||
client_key = "./../devca1/testclient1-key.pem"
|
||||
# client_passphrase = # Limited and experimental, see downloader doc.
|
||||
# header =
|
||||
|
||||
#key =
|
||||
#passphrase =
|
||||
|
||||
# specification requires at least two providers (default),
|
||||
# to override for testing, enable:
|
||||
# allow_single_provider = true
|
||||
[[providers]]
|
||||
name = "local-dev-provider3"
|
||||
domain = "localhost"
|
||||
# rate = 1.8
|
||||
# insecure = true
|
||||
write_indices = true
|
||||
# If aggregator.category == "aggreator", set for an entry that should
|
||||
# be listed in addition:
|
||||
category = "lister"
|
||||
# ignore_pattern = [".*white.*", ".*red.*"]
|
||||
```
|
||||
|
||||
<!-- MARKDOWN-AUTO-DOCS:END -->
|
||||
|
||||
#### Publish others' advisories
|
||||
|
||||
In case you want to provide CSAF advisories from others
|
||||
that only qualify as CSAF publishers, see
|
||||
[how to use the `csaf_aggregator` as "CSAF proxy provider"](proxy-provider-for-aggregator.md).
|
||||
|
||||
Some providers may limit the rate of requests that may be sent to retrieve advisories.
|
||||
This may cause issues with the aggregator.
|
||||
In this case, the --rate option can be used to adjust the requests per second
|
||||
sent by each worker of the aggregator to an acceptable rate.
|
||||
(The rate that is considered acceptable depends on the provider.)
|
||||
|
|
|
|||
|
|
@ -3,22 +3,108 @@
|
|||
### Usage
|
||||
|
||||
```
|
||||
csaf_checker [OPTIONS]
|
||||
Usage:
|
||||
csaf_checker [OPTIONS] domain...
|
||||
|
||||
Application Options:
|
||||
-o, --output=REPORT-FILE File name of the generated report
|
||||
-f, --format=[json|html] Format of report (default: json)
|
||||
--insecure Do not check TLS certificates from provider
|
||||
--client-cert=CERT-FILE TLS client certificate file (PEM encoded data)
|
||||
--client-key=KEY-FILE TLS client private key file (PEM encoded data)
|
||||
--client_cert=CERT-FILE TLS client certificate file (PEM encoded data)
|
||||
--client_key=KEY-FILE TLS client private key file (PEM encoded data)
|
||||
--client_passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see downloader doc)
|
||||
--version Display version of the binary
|
||||
-v, --verbose Verbose output
|
||||
-r, --rate= The average upper limit of https operations
|
||||
per second
|
||||
-r, --rate= The average upper limit of https operations per second (defaults to unlimited)
|
||||
-t, --time_range=RANGE RANGE of time from which advisories to download
|
||||
-i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs
|
||||
-H, --header= One or more extra HTTP header fields
|
||||
--validator=URL URL to validate documents remotely
|
||||
--validator_cache=FILE FILE to cache remote validations
|
||||
--validator_preset= One or more presets to validate remotely (default: [mandatory])
|
||||
-c, --config=TOML-FILE Path to config TOML file
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
```
|
||||
|
||||
Will check all given _domains_, by trying each as a CSAF provider.
|
||||
|
||||
If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent.
|
||||
|
||||
If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and checking proceeds from there.
|
||||
|
||||
If no config file is explictly given the follwing places are searched for a config file:
|
||||
|
||||
```
|
||||
~/.config/csaf/checker.toml
|
||||
~/.csaf_checker.toml
|
||||
csaf_checker.toml
|
||||
```
|
||||
|
||||
with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH` on Windows systems.
|
||||
Supported options in config files:
|
||||
|
||||
```
|
||||
output = ""
|
||||
format = "json"
|
||||
insecure = false
|
||||
# client_cert # not set by default
|
||||
# client_key # not set by default
|
||||
# client_passphrase # not set by default
|
||||
verbose = false
|
||||
# rate # not set by default
|
||||
# time_range # not set by default
|
||||
# header # not set by default
|
||||
# validator # not set by default
|
||||
# validator_cache # not set by default
|
||||
validator_preset = ["mandatory"]
|
||||
```
|
||||
|
||||
Usage example:
|
||||
` ./csaf_checker example.com -f html --rate=5.3 -o check-results.html`
|
||||
`./csaf_checker example.com -f html --rate=5.3 -H apikey:SECRET -o check-results.html`
|
||||
|
||||
Each performed check has a return type of either 0,1 or 2:
|
||||
|
||||
```
|
||||
type 0: success
|
||||
type 1: warning
|
||||
type 2: error
|
||||
```
|
||||
|
||||
The checker result is a success if no checks resulted in type 2, and a failure otherwise.
|
||||
|
||||
The option `timerange` allows checking only advisories from a given time
|
||||
interval. It can only be given once. See the
|
||||
[downloader documentation](csaf_downloader.md#timerange-option) for details.
|
||||
|
||||
Some providers may limit the rate of requests that may be sent to retrieve advisories.
|
||||
This may cause the checker to be unable to retrieve all advisories. In this case,
|
||||
the --rate option can be used to adjust the requests per second
|
||||
sent by the checker to an acceptable rate.
|
||||
(The rate that is considered acceptable depends on the provider.)
|
||||
|
||||
|
||||
You can ignore certain advisories while checking by specifying a list
|
||||
of regular expressions[^1] to match their URLs by using the `ignorepattern`
|
||||
option.
|
||||
E.g. `-i='.*white.*' -i='.*red.*'` will ignore files whose URLs contain
|
||||
the sub strings **white** or **red**.
|
||||
In the config file this has to be noted as:
|
||||
|
||||
```
|
||||
ignorepattern = [".*white.*", ".*red.*"]
|
||||
```
|
||||
|
||||
### Remarks
|
||||
|
||||
The `role` given in the `provider-metadata.json` is not
|
||||
yet considered to change the overall result,
|
||||
see <https://github.com/gocsaf/csaf/issues/221> .
|
||||
|
||||
If a provider hosts one or more advisories with a TLP level of AMBER or RED, then these advisories must be access protected.
|
||||
To check these advisories, authorization can be given via custom headers or certificates.
|
||||
The authorization method chosen needs to grant access to all advisories, as otherwise the
|
||||
checker will be unable to check the advisories it doesn't have permission for, falsifying the result.
|
||||
|
||||
[^1]: Accepted syntax is described [here](https://github.com/google/re2/wiki/Syntax).
|
||||
|
|
|
|||
176
docs/csaf_downloader.md
Normal file
176
docs/csaf_downloader.md
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
## csaf_downloader
|
||||
|
||||
A tool to download CSAF documents from CSAF providers.
|
||||
|
||||
### Usage
|
||||
|
||||
```
|
||||
csaf_downloader [OPTIONS] domain...
|
||||
|
||||
Application Options:
|
||||
-d, --directory=DIR DIRectory to store the downloaded files in
|
||||
--insecure Do not check TLS certificates from provider
|
||||
--ignore_sigcheck Ignore signature check results, just warn on mismatch
|
||||
--client_cert=CERT-FILE TLS client certificate file (PEM encoded data)
|
||||
--client_key=KEY-FILE TLS client private key file (PEM encoded data)
|
||||
--client_passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see doc)
|
||||
--version Display version of the binary
|
||||
-n, --no_store Do not store files
|
||||
-r, --rate= The average upper limit of https operations per second (defaults to unlimited)
|
||||
-w, --worker=NUM NUMber of concurrent downloads (default: 2)
|
||||
-t, --time_range=RANGE RANGE of time from which advisories to download
|
||||
-f, --folder=FOLDER Download into a given subFOLDER
|
||||
-i, --ignore_pattern=PATTERN Do not download files if their URLs match any of the given PATTERNs
|
||||
-H, --header= One or more extra HTTP header fields
|
||||
--enumerate_pmd_only If this flag is set to true, the downloader will only enumerate valid provider metadata files, but not download documents
|
||||
--validator=URL URL to validate documents remotely
|
||||
--validator_cache=FILE FILE to cache remote validations
|
||||
--validator_preset=PRESETS One or more PRESETS to validate remotely (default: [mandatory])
|
||||
-m, --validation_mode=MODE[strict|unsafe] MODE how strict the validation is (default: strict)
|
||||
--forward_url=URL URL of HTTP endpoint to forward downloads to
|
||||
--forward_header= One or more extra HTTP header fields used by forwarding
|
||||
--forward_queue=LENGTH Maximal queue LENGTH before forwarder (default: 5)
|
||||
--forward_insecure Do not check TLS certificates from forward endpoint
|
||||
--log_file=FILE FILE to log downloading to (default: downloader.log)
|
||||
--log_level=LEVEL[debug|info|warn|error] LEVEL of logging details (default: info)
|
||||
-c, --config=TOML-FILE Path to config TOML file
|
||||
--preferred_hash=HASH[sha256|sha512] HASH to prefer
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
```
|
||||
|
||||
Will download all CSAF documents for the given _domains_, by trying each as a CSAF provider.
|
||||
|
||||
If no user agent is specified with `--header=user-agent:custom-agent/1.0` then the default agent in the form of `csaf_distribution/VERSION` is sent.
|
||||
|
||||
If a _domain_ starts with `https://` it is instead considered a direct URL to the `provider-metadata.json` and downloading proceeds from there.
|
||||
|
||||
Increasing the number of workers opens more connections to the web servers
|
||||
to download more advisories at once. This may improve the overall speed of the download.
|
||||
However, since this also increases the load on the servers, their administrators could
|
||||
have taken countermeasures to limit this.
|
||||
|
||||
For example, some providers may limit the rate of requests that may be sent to retrieve advisories.
|
||||
This may cause the downloader to be unable to retrieve all advisories.
|
||||
In this case, the --rate option can be used to adjust the requests per second
|
||||
sent by the downloader to an acceptable rate.
|
||||
(The rate that is considered acceptable depends on the provider.)
|
||||
|
||||
If no config file is explicitly given, the following places are searched for a config file:
|
||||
|
||||
```
|
||||
~/.config/csaf/downloader.toml
|
||||
~/.csaf_downloader.toml
|
||||
csaf_downloader.toml
|
||||
```
|
||||
|
||||
with `~` expanding to `$HOME` on unixoid systems and `%HOMEPATH%` on Windows systems.
|
||||
|
||||
Supported options in config files:
|
||||
|
||||
```
|
||||
# directory # not set by default
|
||||
insecure = false
|
||||
# client_cert # not set by default
|
||||
# client_key # not set by default
|
||||
# client_passphrase # not set by default
|
||||
ignore_sigcheck = false
|
||||
# rate # set to unlimited
|
||||
worker = 2
|
||||
# time_range # not set by default
|
||||
# folder # not set by default
|
||||
# ignore_pattern # not set by default
|
||||
# header # not set by default
|
||||
# validator # not set by default
|
||||
# validator_cache # not set by default
|
||||
validator_preset = ["mandatory"]
|
||||
validation_mode = "strict"
|
||||
# forward_url # not set by default
|
||||
# forward_header # not set by default
|
||||
forward_queue = 5
|
||||
forward_insecure = false
|
||||
```
|
||||
|
||||
If the `folder` option is given all the advisories are stored in a subfolder
|
||||
of this name. Otherwise the advisories are each stored in a folder named
|
||||
by the year they are from.
|
||||
|
||||
You can ignore certain advisories while downloading by specifying a list
|
||||
of regular expressions[^1] to match their URLs by using the `ignorepattern`
|
||||
option.
|
||||
|
||||
E.g. `-i='.*white.*' -i='.*red.*'` will ignore files whose URLs contain
|
||||
the sub strings **white** or **red**.
|
||||
In the config file this has to be noted as:
|
||||
|
||||
```
|
||||
ignorepattern = [".*white.*", ".*red.*"]
|
||||
```
|
||||
|
||||
#### Timerange option
|
||||
|
||||
The `time_range` parameter enables downloading advisories
|
||||
whose last changes fall into a given interval.
|
||||
There are three possible notations:
|
||||
|
||||
1. Relative. If the given string follows the rules of a
|
||||
[Go duration](https://pkg.go.dev/time@go1.20.6#ParseDuration),
|
||||
the time interval from now going back that duration is used.
|
||||
In extension to this the suffixes 'd' for days, 'M' for month
|
||||
and 'y' for years are recognized. In these cases only integer
|
||||
values are accepted without any fractions.
|
||||
Some examples:
|
||||
|
||||
- `"3h"` means downloading the advisories that have changed in the last three hours.
|
||||
- `"30m"` .. changed within the last thirty minutes.
|
||||
- `"3M2m"` .. changed within the last three months and two minutes.
|
||||
- `"2y"` .. changed within the last two years.
|
||||
|
||||
2. Absolute. If the given string is an RFC 3339 date timestamp
|
||||
the time interval between this date and now is used.
|
||||
E.g. `"2006-01-02"` means that all files between 2006 January 2nd and now are going to be
|
||||
downloaded.
|
||||
Accepted patterns are:
|
||||
|
||||
- `"2006-01-02T15:04:05Z"`
|
||||
- `"2006-01-02T15:04:05+07:00"`
|
||||
- `"2006-01-02T15:04:05-07:00"`
|
||||
- `"2006-01-02T15:04:05"`
|
||||
- `"2006-01-02T15:04"`
|
||||
- `"2006-01-02T15"`
|
||||
- `"2006-01-02"`
|
||||
- `"2006-01"`
|
||||
- `"2006"`
|
||||
|
||||
Missing parts are set to the smallest value possible in that field.
|
||||
|
||||
3. Range. Same as 2 but separated by a `,` to span an interval, e.g. `2019,2024`
|
||||
spans an interval from 1st January 2019 to the 1st January of 2024.
|
||||
|
||||
All interval boundaries are inclusive.
|
||||
|
||||
#### Forwarding
|
||||
|
||||
The downloader is able to forward downloaded advisories and their checksums,
|
||||
OpenPGP signatures and validation results to an HTTP endpoint.
|
||||
The details of the implemented API are described [here](https://github.com/mfd2007/csaf_upload_interface).
|
||||
**Attention** This is a work in progress. There is
|
||||
no production ready server which implements this protocol.
|
||||
The server in the linked repository is currently for development and testing only.
|
||||
|
||||
#### beware of client cert passphrase
|
||||
|
||||
The `client-passphrase` option implements a legacy private
|
||||
key protection mechanism based on RFC 1423, see
|
||||
[DecryptPEMBlock](https://pkg.go.dev/crypto/x509@go1.20.6#DecryptPEMBlock).
|
||||
Thus it is considered experimental and is most likely to be removed
|
||||
in a future release. Please only use this option, if you fully understand
|
||||
the security implications!
|
||||
Note that for fully automated processes, it usually does not make sense
|
||||
to protect the client certificate's private key with a passphrase.
|
||||
Because the passphrase has to be accessible to the process anyway to run
|
||||
unattended. In this situation the processing environment should be secured
|
||||
properly instead.
|
||||
|
||||
[^1]: Accepted syntax is described [here](https://github.com/google/re2/wiki/Syntax).
|
||||
|
|
@ -1,30 +1,146 @@
|
|||
`csaf_provider` implements the CGI interface for webservers
|
||||
and reads its configuration from a TOML file.
|
||||
`csaf_provider` implements a CGI interface for webservers
|
||||
and reads its configuration from a [TOML](https://toml.io/en/) file.
|
||||
The [setup docs](../README.md#setup-trusted-provider)
|
||||
explain how to wire this up with nginx and where the config file lives.
|
||||
|
||||
When installed, two endpoints are offered,
|
||||
and you should use the [csaf_uploader](../docs/csaf_uploader.md)
|
||||
to access them:
|
||||
|
||||
### /api/create
|
||||
|
||||
Must be called once after all configuration values are set.
|
||||
It will write the `provider-metadata.json` and may write
|
||||
or update the `security.txt`.
|
||||
|
||||
Once the files exist, they will **not** be overwritten
|
||||
by additional `create` calls, even if the config values have been changed.
|
||||
Changes should happen rarely and can be done manually.
|
||||
Also keep an eye on having the keys in the `.well-known/csaf/openpgp`
|
||||
folder match the ones mentioned in the `provider-metadata.json`.
|
||||
|
||||
### /api/upload
|
||||
Called for each upload of a document and will update
|
||||
the CSAF structure in the file system accordingly.
|
||||
|
||||
|
||||
## Provider options
|
||||
|
||||
Following options are supported in the config file:
|
||||
The following example file documents all available configuration options:
|
||||
|
||||
- password: Authentication password for accessing the CSAF provider.
|
||||
- openpgp_public_key: The public OpenPGP key. Default: `/usr/lib/csaf/openpgp_public.asc`
|
||||
- openpgp_private_key: The private OpenPGP key. Default: `/usr/lib/csaf/openpgp_private.asc`
|
||||
- folder: Specify the root folder. Default: `/var/www/`.
|
||||
- web: Specify the web folder. Default: `/var/www/html`.
|
||||
- tlps: Set the allowed TLP coming with the upload request (one or more of "csaf", "white", "amber", "green", "red").
|
||||
The "csaf" selection lets the provider take the value from the CSAF document.
|
||||
These affect the list items in the web interface.
|
||||
Default: `["csaf", "white", "amber", "green", "red"]`.
|
||||
- upload_signature: Send signature with the request, an additional input-field in the web interface will be shown to let user enter an ascii armored signature. Default: `false`.
|
||||
- canonical_url_prefix: start of the URL where contents shall be accessible from the internet. Default: `https://$SERVER_NAME`.
|
||||
- no_passphrase: Let the user send the request without a passphrase; if set to true the corresponding input-field in the web interface will disappear. Default: `false`.
|
||||
- no_validation: Skip validating the uploaded CSAF document against the JSON schema. Default: `false`.
|
||||
- no_web_ui: Disable the web interface. Default: `false`.
|
||||
- dynamic_provider_metadata: Take the publisher from the CSAF document. Default: `false`.
|
||||
- provider_metadata: Configure the provider metadata.
|
||||
- provider_metadata.list_on_CSAF_aggregators: List on aggregators
|
||||
- provider_metadata.mirror_on_CSAF_aggregators: Mirror on aggregators
|
||||
- provider_metadata.publisher: Set the publisher. Default: `{"category"= "vendor", "name"= "Example", "namespace"= "https://example.com"}`.
|
||||
- upload_limit: Set the upload limit size of the file. Default: `50 MiB`.
|
||||
- issuer: The issuer of the CA, which if set, restricts the writing permission and the accessing to the web-interface to only the client certificates signed with this CA.
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/examples/provider_config.toml) -->
|
||||
<!-- The below code snippet is automatically added from ../docs/examples/provider_config.toml -->
|
||||
```toml
|
||||
# Set the authentication password for accessing the CSAF provider.
|
||||
# It is essential that you set a secure password between the quotation marks.
|
||||
# The default being no password set.
|
||||
#password = ""
|
||||
|
||||
# Set the path to the public OpenPGP key.
|
||||
#openpgp_public_key = "/etc/csaf/openpgp_public.asc"
|
||||
|
||||
# Set the path to the private OpenPGP key.
|
||||
#openpgp_private_key = "/etc/csaf/openpgp_private.asc"
|
||||
|
||||
# Specify the root folder.
|
||||
#folder = "/var/www/"
|
||||
|
||||
# Specify the web folder.
|
||||
#web = "/var/www/html"
|
||||
|
||||
# Allow sending a signature with the request.
|
||||
# An additional input-field in the web interface will be shown
|
||||
# to let user enter an ascii armored OpenPGP signature.
|
||||
#upload_signature = false
|
||||
|
||||
# Set the beginning of the URL where contents are accessible from the internet.
|
||||
# If not set, the provider will read from the $SERVER_NAME variable.
|
||||
# The following shows an example of a manually set prefix:
|
||||
#canonical_url_prefix = "https://localhost"
|
||||
|
||||
# Require users to use both
|
||||
# (1) a password and (2) a valid Client Certificate for write access.
|
||||
#certificate_and_password = false
|
||||
|
||||
# Allow the user to send the request without having to send a passphrase
|
||||
# to unlock the OpenPGP key.
|
||||
# If set to true, the input-field in the web interface will be omitted.
|
||||
#no_passphrase = false
|
||||
|
||||
# Make the provider skip the validation of the uploaded CSAF document
|
||||
# against the JSON schema.
|
||||
#no_validation = false
|
||||
|
||||
# Disable the experimental web interface.
|
||||
#no_web_ui = true
|
||||
|
||||
# Make the provider take the publisher from the CSAF document.
|
||||
#dynamic_provider_metadata = false
|
||||
|
||||
# Set the upload limit size of a file in bytes.
|
||||
# The default is equivalent to 50 MiB.
|
||||
#upload_limit = 52428800
|
||||
|
||||
# Set the issuer of the CA.
|
||||
# If set, the provider restricts the writing permission and the
|
||||
# access to the web-interface to users with the client certificates
|
||||
# signed with this CA.
|
||||
# The following shows an example. As default, none is set.
|
||||
#issuer = "Example Company"
|
||||
|
||||
# Make the provider write/update index.txt and changes.csv.
|
||||
#write_indices = false
|
||||
|
||||
# Make the provider write a `CSAF:` entry into `security.txt`.
|
||||
#write_security = false
|
||||
|
||||
# Set the TLP allowed to be sent with the upload request
|
||||
# (one or more of "csaf", "white", "amber", "green", "red").
|
||||
# The "csaf" entry lets the provider take the value from the CSAF document.
|
||||
# These affect the list items in the web interface.
|
||||
#tlps = ["csaf", "white", "amber", "green", "red"]
|
||||
|
||||
# Make the provider create a ROLIE service document.
|
||||
#create_service_document = false
|
||||
|
||||
# Make the provider create a ROLIE category document from a list of strings.
|
||||
# If a list item starts with `expr:`
|
||||
# the rest of the string is used as a JsonPath expression
|
||||
# to extract a string from the incoming advisories.
|
||||
# Strings not starting with `expr:` are taken verbatim.
|
||||
# By default no category documents are created.
|
||||
# This example provides an overview over the syntax,
|
||||
# adjust the parameters depending on your setup.
|
||||
#categories = ["Example Company Product A", "expr:document.lang"]
|
||||
|
||||
# Make the provider use a remote validator service. Not used by default.
|
||||
# This example provides an overview over the syntax,
|
||||
# adjust the parameters depending on your setup.
|
||||
#[remote_validator]
|
||||
#url = "http://localhost:8082"
|
||||
#presets = ["mandatory"]
|
||||
#cache = "/var/lib/csaf/validations.db"
|
||||
|
||||
[provider_metadata]
|
||||
# Indicate that aggregators can list us.
|
||||
list_on_CSAF_aggregators = true
|
||||
# Indicate that aggregators can mirror us.
|
||||
mirror_on_CSAF_aggregators = true
|
||||
|
||||
# Set the publisher details.
|
||||
[provider_metadata.publisher]
|
||||
category = "vendor"
|
||||
name = "Example Company"
|
||||
namespace = "https://example.com"
|
||||
issuing_authority = "We at Example Company are responsible for publishing and maintaining Product Y."
|
||||
contact_details = "Example Company can be reached at contact_us@example.com, or via our website at https://www.example.com/contact."
|
||||
```
|
||||
<!-- MARKDOWN-AUTO-DOCS:END -->
|
||||
|
||||
|
||||
### Experimental web upload interface
|
||||
|
||||
There is an experimental upload interface which works with a web browser.
|
||||
It is disabled by default, as there are known issues, notably:
|
||||
* https://github.com/gocsaf/csaf/issues/43
|
||||
* https://github.com/gocsaf/csaf/issues/256
|
||||
|
|
|
|||
16
docs/csaf_searcher.md
Normal file
16
docs/csaf_searcher.md
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
# csaf_advisory_example
|
||||
|
||||
This is a small searcher using the advisory model to search for PURLs belonging to a product ID in an advisory of the CSAF 2.0 standard.
|
||||
|
||||
Usage:
|
||||
```
|
||||
|
||||
csaf_advisory_example OPTIONS [files...]
|
||||
|
||||
Application Options:
|
||||
-p The Product ID
|
||||
|
||||
Help Options:
|
||||
-h, --help Show a help message
|
||||
|
||||
```
|
||||
|
|
@ -3,31 +3,33 @@
|
|||
### Usage
|
||||
|
||||
```
|
||||
csaf_uploader [OPTIONS]
|
||||
csaf_uploader [OPTIONS]
|
||||
|
||||
Application Options:
|
||||
-a, --action=[upload|create] Action to perform (default: upload)
|
||||
-u, --url=URL URL of the CSAF provider (default:
|
||||
https://localhost/cgi-bin/csaf_provider.go)
|
||||
-u, --url=URL URL of the CSAF provider (default: https://localhost/cgi-bin/csaf_provider.go)
|
||||
-t, --tlp=[csaf|white|green|amber|red] TLP of the feed (default: csaf)
|
||||
-x, --external-signed CSAF files are signed externally. Assumes .asc files
|
||||
beside CSAF files.
|
||||
-s, --no-schema-check Do not check files against CSAF JSON schema locally.
|
||||
-x, --external_signed CSAF files are signed externally. Assumes .asc files beside CSAF files.
|
||||
-s, --no_schema_check Do not check files against CSAF JSON schema locally.
|
||||
-k, --key=KEY-FILE OpenPGP key to sign the CSAF files
|
||||
-p, --password=PASSWORD Authentication password for accessing the CSAF provider
|
||||
-P, --passphrase=PASSPHRASE Passphrase to unlock the OpenPGP key
|
||||
--client-cert=CERT-FILE.crt TLS client certificate file (PEM encoded data)
|
||||
--client-key=KEY-FILE.pem TLS client private key file (PEM encoded data)
|
||||
-i, --password-interactive Enter password interactively
|
||||
-I, --passphrase-interactive Enter passphrase interactively
|
||||
--client_cert=CERT-FILE.crt TLS client certificate file (PEM encoded data)
|
||||
--client_key=KEY-FILE.pem TLS client private key file (PEM encoded data)
|
||||
--client_passphrase=PASSPHRASE Optional passphrase for the client cert (limited, experimental, see downloader doc)
|
||||
-i, --password_interactive Enter password interactively
|
||||
-I, --passphrase_interactive Enter OpenPGP key passphrase interactively
|
||||
--insecure Do not check TLS certificates from provider
|
||||
-c, --config=INI-FILE Path to config ini file
|
||||
-c, --config=TOML-FILE Path to config TOML file
|
||||
--version Display version of the binary
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
```
|
||||
E.g. creating the initial directiories and files
|
||||
E.g. creating the initial directories and files.
|
||||
This must only be done once, as subsequent `create` calls to the
|
||||
[csaf_provider](../docs/csaf_provider.md)
|
||||
may not lead to the desired result.
|
||||
|
||||
```bash
|
||||
./csaf_uploader -a create -u https://localhost/cgi-bin/csaf_provider.go
|
||||
|
|
@ -41,20 +43,35 @@ E.g. uploading a csaf-document
|
|||
|
||||
which asks to enter a password interactively.
|
||||
|
||||
To upload an already signed document, use the `-x` option
|
||||
```bash
|
||||
# Note: The file CSAF-document-1.json.asc must exist
|
||||
./csaf_uploader -x -a upload -I -t white -u https://localhost/cgi-bin/csaf_provider.go CSAF-document-1.json
|
||||
```
|
||||
|
||||
By default csaf_uploader will try to load a config file
|
||||
from the following places:
|
||||
|
||||
```
|
||||
"~/.config/csaf/uploader.ini",
|
||||
"~/.csaf_uploader.ini",
|
||||
"csaf_uploader.ini",
|
||||
"~/.config/csaf/uploader.toml",
|
||||
"~/.csaf_uploader.toml",
|
||||
"csaf_uploader.toml",
|
||||
```
|
||||
|
||||
The command line options can be written in the init file, except:
|
||||
`password-interactive`, `passphrase-interactive` and `config`.
|
||||
An example:
|
||||
|
||||
The command line options can be written in the config file:
|
||||
```
|
||||
action=create
|
||||
u=https://localhost/cgi-bin/csaf_provider.go
|
||||
action = "upload"
|
||||
url = "https://localhost/cgi-bin/csaf_provider.go"
|
||||
tlp = "csaf"
|
||||
external_signed = false
|
||||
no_schema_check = false
|
||||
# key = "/path/to/openpgp/key/file" # not set by default
|
||||
# password = "auth-key to access the provider" # not set by default
|
||||
# passphrase = "OpenPGP passphrase" # not set by default
|
||||
# client_cert = "/path/to/client/cert" # not set by default
|
||||
# client_key = "/path/to/client/cert.key" # not set by default
|
||||
# client_passphrase = "client cert passphrase" # not set by default
|
||||
password_interactive = false
|
||||
passphrase_interactive = false
|
||||
insecure = false
|
||||
```
|
||||
|
|
|
|||
34
docs/csaf_validator.md
Normal file
34
docs/csaf_validator.md
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
## csaf_validator
|
||||
|
||||
is a tool to validate local advisory files against the JSON Schema and an optional remote validator.
|
||||
|
||||
### Exit codes
|
||||
|
||||
If no fatal error occurs the program will exit with an exit code `n` with the following conditions:
|
||||
|
||||
- `n == 0`: all valid
|
||||
- `(n & 1) > 0`: a general error occurred, all other flags are unset (see logs for more information)
|
||||
- `(n & 2) > 0`: schema validation failed
|
||||
- `(n & 4) > 0`: no remote validator configured
|
||||
- `(n & 8) > 0`: failure in remote validation
|
||||
|
||||
### Usage
|
||||
|
||||
```
|
||||
csaf_validator [OPTIONS] files...
|
||||
|
||||
Application Options:
|
||||
--version Display version of the binary
|
||||
--validator=URL URL to validate documents remotely
|
||||
--validator_cache=FILE FILE to cache remote validations
|
||||
--validator_preset= One or more presets to validate remotely (default: mandatory)
|
||||
-o AMOUNT, --output=AMOUNT If a remote validator was used, display the results in JSON format
|
||||
|
||||
AMOUNT:
|
||||
all: Print the entire JSON output
|
||||
important: Print the entire JSON output but omit all tests without errors, warnings and infos.
|
||||
short: Print only the result, errors, warnings and infos.
|
||||
|
||||
Help Options:
|
||||
-h, --help Show this help message
|
||||
```
|
||||
|
|
@ -55,7 +55,7 @@ signing_key
|
|||
encryption_key
|
||||
non_repudiation
|
||||
|
||||
dns_name = "*.local"
|
||||
dns_name = "*.test"
|
||||
dns_name = "localhost"
|
||||
|
||||
serial = 010
|
||||
|
|
|
|||
|
|
@ -1,12 +1,25 @@
|
|||
workers = 2
|
||||
folder = "/var/csaf_aggregator"
|
||||
lock_file = "/var/csaf_aggregator/run.lock"
|
||||
lock_file = "/var/lock/csaf_aggregator/lock"
|
||||
web = "/var/csaf_aggregator/html"
|
||||
domain = "https://localhost:9443"
|
||||
rate = 10.0
|
||||
insecure = true
|
||||
#verbose = false
|
||||
#openpgp_private_key =
|
||||
#openpgp_public_key =
|
||||
#interim_years =
|
||||
#passphrase =
|
||||
#write_indices = false
|
||||
#time_range =
|
||||
|
||||
# specification requires at least two providers (default),
|
||||
# to override for testing, enable:
|
||||
# allow_single_provider = true
|
||||
|
||||
[aggregator]
|
||||
# Set if this instance shall be a mirror (aka `aggregator`) or a `lister`.
|
||||
# This determines the default value for the entries in [[provider]].
|
||||
category = "aggregator"
|
||||
name = "Example Development CSAF Aggregator"
|
||||
contact_details = "some @ somewhere"
|
||||
|
|
@ -16,19 +29,30 @@ insecure = true
|
|||
[[providers]]
|
||||
name = "local-dev-provider"
|
||||
domain = "localhost"
|
||||
categories = ["Example Company Product A", "expr:document.lang"]
|
||||
create_service_document = true
|
||||
# rate = 1.5
|
||||
# insecure = true
|
||||
# time_range =
|
||||
|
||||
[[providers]]
|
||||
name = "local-dev-provider2"
|
||||
domain = "localhost"
|
||||
domain = "https://localhost:8443/.well-known/csaf/provider-metadata.json"
|
||||
# rate = 1.2
|
||||
# insecure = true
|
||||
write_indices = true
|
||||
client_cert = "./../devca1/testclient1.crt"
|
||||
client_key = "./../devca1/testclient1-key.pem"
|
||||
# client_passphrase = # Limited and experimental, see downloader doc.
|
||||
# header =
|
||||
|
||||
#key =
|
||||
#passphrase =
|
||||
|
||||
# specification requires at least two providers (default),
|
||||
# to override for testing, enable:
|
||||
# allow_single_provider = true
|
||||
|
||||
[[providers]]
|
||||
name = "local-dev-provider3"
|
||||
domain = "localhost"
|
||||
# rate = 1.8
|
||||
# insecure = true
|
||||
write_indices = true
|
||||
# If aggregator.category == "aggregator", set for an entry that should
|
||||
# be listed in addition:
|
||||
category = "lister"
|
||||
# ignore_pattern = [".*white.*", ".*red.*"]
|
||||
|
|
|
|||
102
docs/examples/provider_config.toml
Normal file
102
docs/examples/provider_config.toml
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
# Set the authentication password for accessing the CSAF provider.
|
||||
# It is essential that you set a secure password between the quotation marks.
|
||||
# The default being no password set.
|
||||
#password = ""
|
||||
|
||||
# Set the path to the public OpenPGP key.
|
||||
#openpgp_public_key = "/etc/csaf/openpgp_public.asc"
|
||||
|
||||
# Set the path to the private OpenPGP key.
|
||||
#openpgp_private_key = "/etc/csaf/openpgp_private.asc"
|
||||
|
||||
# Specify the root folder.
|
||||
#folder = "/var/www/"
|
||||
|
||||
# Specify the web folder.
|
||||
#web = "/var/www/html"
|
||||
|
||||
# Allow sending a signature with the request.
|
||||
# An additional input-field in the web interface will be shown
|
||||
# to let user enter an ascii armored OpenPGP signature.
|
||||
#upload_signature = false
|
||||
|
||||
# Set the beginning of the URL where contents are accessible from the internet.
|
||||
# If not set, the provider will read from the $SERVER_NAME variable.
|
||||
# The following shows an example of a manually set prefix:
|
||||
#canonical_url_prefix = "https://localhost"
|
||||
|
||||
# Require users to use a password and a valid Client Certificate for write access.
|
||||
#certificate_and_password = false
|
||||
|
||||
# Allow the user to send the request without having to send a passphrase
|
||||
# to unlock the OpenPGP key.
|
||||
# If set to true, the input-field in the web interface will be omitted.
|
||||
#no_passphrase = false
|
||||
|
||||
# Make the provider skip the validation of the uploaded CSAF document
|
||||
# against the JSON schema.
|
||||
#no_validation = false
|
||||
|
||||
# Disable the experimental web interface.
|
||||
#no_web_ui = true
|
||||
|
||||
# Make the provider take the publisher from the CSAF document.
|
||||
#dynamic_provider_metadata = false
|
||||
|
||||
# Set the upload limit size of a file in bytes.
|
||||
# The default is equivalent to 50 MiB.
|
||||
#upload_limit = 52428800
|
||||
|
||||
# Set the issuer of the CA.
|
||||
# If set, the provider restricts the writing permission and the
|
||||
# access to the web-interface to users with the client certificates
|
||||
# signed with this CA.
|
||||
# The following shows an example. As default, none is set.
|
||||
#issuer = "Example Company"
|
||||
|
||||
# Make the provider write/update index.txt and changes.csv.
|
||||
#write_indices = false
|
||||
|
||||
# Make the provider write a `CSAF:` entry into `security.txt`.
|
||||
#write_security = false
|
||||
|
||||
# Set the TLP allowed to be sent with the upload request
|
||||
# (one or more of "csaf", "white", "amber", "green", "red").
|
||||
# The "csaf" entry lets the provider take the value from the CSAF document.
|
||||
# These affect the list items in the web interface.
|
||||
#tlps = ["csaf", "white", "amber", "green", "red"]
|
||||
|
||||
# Make the provider create a ROLIE service document.
|
||||
#create_service_document = false
|
||||
|
||||
# Make the provider create a ROLIE category document from a list of strings.
|
||||
# If a list item starts with `expr:`
|
||||
# the rest of the string is used as a JsonPath expression
|
||||
# to extract a string from the incoming advisories.
|
||||
# Strings not starting with `expr:` are taken verbatim.
|
||||
# By default no category documents are created.
|
||||
# This example provides an overview over the syntax,
|
||||
# adjust the parameters depending on your setup.
|
||||
#categories = ["Example Company Product A", "expr:document.lang"]
|
||||
|
||||
# Make the provider use a remote validator service. Not used by default.
|
||||
# This example provides an overview over the syntax,
|
||||
# adjust the parameters depending on your setup.
|
||||
#[remote_validator]
|
||||
#url = "http://localhost:8082"
|
||||
#presets = ["mandatory"]
|
||||
#cache = "/var/lib/csaf/validations.db"
|
||||
|
||||
[provider_metadata]
|
||||
# Indicate that aggregators can list us.
|
||||
list_on_CSAF_aggregators = true
|
||||
# Indicate that aggregators can mirror us.
|
||||
mirror_on_CSAF_aggregators = true
|
||||
|
||||
# Set the publisher details.
|
||||
[provider_metadata.publisher]
|
||||
category = "vendor"
|
||||
name = "Example Company"
|
||||
namespace = "https://example.com"
|
||||
issuing_authority = "We at Example Company are responsible for publishing and maintaining Product Y."
|
||||
contact_details = "Example Company can be reached at contact_us@example.com, or via our website at https://www.example.com/contact."
|
||||
|
|
@ -51,9 +51,9 @@ location /cgi-bin/ {
|
|||
|
||||
# Adjust non standard parameters (SCRIPT_FILENAME)
|
||||
fastcgi_param SCRIPT_FILENAME /usr/lib$fastcgi_script_name;
|
||||
|
||||
fastcgi_param PATH_INFO $fastcgi_path_info;
|
||||
fastcgi_param CSAF_CONFIG /usr/lib/csaf/config.toml;
|
||||
|
||||
fastcgi_param CSAF_CONFIG /etc/csaf/config.toml;
|
||||
|
||||
fastcgi_param SSL_CLIENT_VERIFY $ssl_client_verify;
|
||||
fastcgi_param SSL_CLIENT_S_DN $ssl_client_s_dn;
|
||||
|
|
@ -78,6 +78,9 @@ server {
|
|||
|
||||
# directory listings
|
||||
autoindex on;
|
||||
|
||||
# allow others web applications to get the static information
|
||||
add_header Access-Control-Allow-Origin "*";
|
||||
}
|
||||
|
||||
# enable CGI
|
||||
|
|
@ -92,7 +95,7 @@ Create `cgi-bin` folder if it not exists: `mkdir -p /usr/lib/cgi-bin/`.
|
|||
Rename and place the `csaf_provider` binary file under `/usr/lib/cgi-bin/csaf_provider.go`.
|
||||
|
||||
|
||||
Create configuration file under `/usr/lib/csaf/config.toml`
|
||||
Create configuration file under `/etc/csaf/config.toml`
|
||||
and make sure is has good, restrictive permissions.
|
||||
It must be readable by the user(id), which the webserver's fastcgi interface
|
||||
uses to start the CGI-binary with,
|
||||
|
|
@ -103,26 +106,34 @@ Many systems use `www-data` as user id, so you could do something like
|
|||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/setupProviderForITest.sh&lines=84-86) -->
|
||||
<!-- The below code snippet is automatically added from ../docs/scripts/setupProviderForITest.sh -->
|
||||
```sh
|
||||
sudo touch /usr/lib/csaf/config.toml
|
||||
sudo chgrp www-data /usr/lib/csaf/config.toml
|
||||
sudo chmod g+r,o-rwx /usr/lib/csaf/config.toml
|
||||
sudo touch /etc/csaf/config.toml
|
||||
sudo chgrp www-data /etc/csaf/config.toml
|
||||
sudo chmod g+r,o-rwx /etc/csaf/config.toml
|
||||
```
|
||||
<!-- MARKDOWN-AUTO-DOCS:END -->
|
||||
|
||||
**This and the other settings are just examples, please adjust permissions and paths according to your webserver and security needs.**
|
||||
**This and the other settings are just examples,**
|
||||
**please adjust permissions and paths**
|
||||
**according to your webserver and security needs.**
|
||||
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/setupProviderForITest.sh&lines=94-99) -->
|
||||
Here is a minimal example configuration,
|
||||
which you need to customize for a production setup,
|
||||
see the [options of `csaf_provider`](https://github.com/gocsaf/csaf/blob/main/docs/csaf_provider.md).
|
||||
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/setupProviderForITest.sh&lines=94-101) -->
|
||||
<!-- The below code snippet is automatically added from ../docs/scripts/setupProviderForITest.sh -->
|
||||
```sh
|
||||
# upload_signature = true
|
||||
openpgp_private_key = "/usr/lib/csaf/private.asc"
|
||||
openpgp_public_key = "/usr/lib/csaf/public.asc"
|
||||
openpgp_private_key = "/etc/csaf/private.asc"
|
||||
openpgp_public_key = "/etc/csaf/public.asc"
|
||||
#tlps = ["green", "red"]
|
||||
canonical_url_prefix = "https://localhost:8443"
|
||||
categories = ["Example Company Product A", "expr:document.lang"]
|
||||
create_service_document = true
|
||||
#no_passphrase = true
|
||||
```
|
||||
<!-- MARKDOWN-AUTO-DOCS:END -->
|
||||
with suitable [replacements](#provider-options)
|
||||
|
||||
|
||||
**Attention:** You need to properly protect the private keys
|
||||
for the OpenPGP and TLS crypto setup. A few variants are possible
|
||||
|
|
@ -133,7 +144,7 @@ on a GNU/Linux operating system.
|
|||
|
||||
Create the folders:
|
||||
```(shell)
|
||||
curl https://192.168.56.102/cgi-bin/csaf_provider.go/create --cert-type p12 --cert {clientCertificat.p12}
|
||||
curl https://192.168.56.102/cgi-bin/csaf_provider.go/api/create --cert-type p12 --cert {clientCertificat.p12}
|
||||
```
|
||||
Replace {clientCertificate.p12} with the client certificate file
|
||||
in pkcs12 format which includes the corresponding key as well.
|
||||
|
|
@ -147,7 +158,7 @@ Again replacing `{clientCert.crt}` and `{clientKey.pem}` accordingly.
|
|||
|
||||
|
||||
To let nginx resolves the DNS record `csaf.data.security.domain.tld` to fulfill the [Requirement 10](https://docs.oasis-open.org/csaf/csaf/v2.0/cs01/csaf-v2.0-cs01.html#7110-requirement-10-dns-path) configure a new server block (virtual host) in a separated file under `/etc/nginx/available-sites/{DNSNAME}` like following:
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/DNSConfigForItest.sh&lines=18-35) -->
|
||||
<!-- MARKDOWN-AUTO-DOCS:START (CODE:src=../docs/scripts/DNSConfigForItest.sh&lines=18-37) -->
|
||||
<!-- The below code snippet is automatically added from ../docs/scripts/DNSConfigForItest.sh -->
|
||||
```sh
|
||||
server {
|
||||
|
|
@ -161,7 +172,7 @@ To let nginx resolves the DNS record `csaf.data.security.domain.tld` to fulfill
|
|||
|
||||
server_name ${DNS_NAME}; # e.g. server_name csaf.data.security.domain.tld;
|
||||
|
||||
location / {
|
||||
location = / {
|
||||
try_files /.well-known/csaf/provider-metadata.json =404;
|
||||
}
|
||||
|
||||
|
|
@ -177,32 +188,6 @@ ln -s /etc/nginx/sites-available/{DNSNAME} /etc/nginx/sites-enabled/
|
|||
```
|
||||
Replace {DNSNAME} with a server block file name.
|
||||
|
||||
## Provider options
|
||||
Provider has many config options described as following:
|
||||
|
||||
- password: Authentication password for accessing the CSAF provider. This is
|
||||
a simple authentication method useful for testing or as additional shareable password in combination with TLS client certificates.
|
||||
- key: The private OpenPGP key.
|
||||
- folder: Specify the root folder. Default: `/var/www/`.
|
||||
- web: Specify the web folder. Default: `/var/www/html`.
|
||||
- tlps: Set the allowed TLP comming with the upload request (one or more of "csaf", "white", "amber", "green", "red").
|
||||
The "csaf" selection lets the provider takes the value from the CSAF document.
|
||||
These affects the list items in the web interface.
|
||||
Default: `["csaf", "white", "amber", "green", "red"]`.
|
||||
- upload_signature: Send signature with the request, an additional input-field in the web interface will be shown to let user enter an ascii armored signature. Default: `false`.
|
||||
- openpgp_url: URL to OpenPGP key-server. Default: `https://openpgp.circl.lu`.
|
||||
- canonical_url_prefix: start of the URL where contents shall be accessible from the internet. Default: `https://$SERVER_NAME`.
|
||||
- no_passphrase: Let user send the passphrase for the OpenPGP key with the request, if set to true the input-field in the web interface will not appear. Default: `false`.
|
||||
- no_validation: Validate the uploaded CSAF document against the JSON schema. Default: `false`.
|
||||
- no_web_ui: Disable the web interface. Default: `false`.
|
||||
- dynamic_provider_metadata: Take the publisher from the CSAF document. Default: `false`.
|
||||
- provider_metadata: Configure the provider metadata.
|
||||
- provider_metadata.list_on_CSAF_aggregators: List on aggregators
|
||||
- provider_metadata.mirror_on_CSAF_aggregators: Mirror on aggregators
|
||||
- provider_metadata.publisher: Set the publisher. Default: `{"category"= "vendor", "name"= "Example", "namespace"= "https://example.com"}`.
|
||||
- upload_limit: Set the upload limit size of the file. Default: `50 MiB`.
|
||||
- issuer: The issuer of the CA, which if set, restricts the writing permission and the accessing to the web-interface to only the client certificates signed with this CA.
|
||||
|
||||
|
||||
### Security considerations
|
||||
|
||||
|
|
|
|||
138
docs/proxy-provider-for-aggregator.md
Normal file
138
docs/proxy-provider-for-aggregator.md
Normal file
|
|
@ -0,0 +1,138 @@
|
|||
If an organisation publishes their advisories via the internet
|
||||
as valid CSAF documents, with good filenames and using TLS,
|
||||
the [CSAF specification](https://docs.oasis-open.org/csaf/csaf/v2.0/csaf-v2.0.md)
|
||||
calls it a *CSAF publisher*.
|
||||
|
||||
After manually downloading the advisories from such a publisher,
|
||||
the tools here can be used to offer the CSAF files for automated downloading
|
||||
as *CSAF aggregator*. (The construct is called *CSAF proxy provider*.
|
||||
See [Section 7.2.5](https://docs.oasis-open.org/csaf/csaf/v2.0/os/csaf-v2.0-os.html#725-role-csaf-aggregator)
|
||||
for more details.)
|
||||
|
||||
There are three necessary steps, easiest is to use
|
||||
one single virtual maschine (or container) per internal provider.
|
||||
Use a different port for each.
|
||||
Other setups are possible of course, e.g. virtual hosts
|
||||
or dynamic settings using nginx configuration methods.
|
||||
(Of course: adapt it to your security needs and procedures,
|
||||
ask someone with experience to administrate your web server.)
|
||||
|
||||
|
||||
### Setup provider api via FastCGI
|
||||
|
||||
Follow the [general instructions to setup the `csaf_provider` as FastCGI binary](provider-setup.md),
|
||||
but differ in the following ways:
|
||||
|
||||
Recommended is to use non-standard TLS port and an internal domain name.
|
||||
|
||||
For each internal provider a customized configuration file
|
||||
must point to a place which can be served via a web server internally
|
||||
later, for e.g. here is a potential config file to be saved
|
||||
at `/etc/csaf/internal-provider1.toml`:
|
||||
|
||||
```toml
|
||||
openpgp_private_key = "/etc/csaf/real_private.asc"
|
||||
openpgp_public_key = "/etc/csaf/real_public.asc"
|
||||
tlps = ["white"]
|
||||
canonical_url_prefix = "https://nein.ntvtn.de:10443"
|
||||
categories = ["Example Company Product B", "expr:document.lang"]
|
||||
create_service_document = true
|
||||
folder = "/var/www-p1/"
|
||||
web = "/var/www-p1/html"
|
||||
```
|
||||
|
||||
For `csaf_provider.go` to find this file, you need to adjust
|
||||
the path via the variable, normally set in `/etc/nginx/fcgiwrap.conf`:
|
||||
```nginx
|
||||
fastcgi_param CSAF_CONFIG /etc/csaf/internal-provider1.toml;
|
||||
```
|
||||
|
||||
(Careful: setting the variable a second time will transfer both values to
|
||||
fcgiwrap via an array. It is not guaranteed that the last value will be
|
||||
used. So if you are thinking about setting this variable dynamically,
|
||||
you need to make sure that is set only once.)
|
||||
|
||||
For example you can clone the files
|
||||
```bash
|
||||
sudo cp /etc/nginx/fcgiwrap.conf /etc/nginx/fcgiwrap-p1.conf
|
||||
sudo vim /etc/nginx/fcgiwrap-p1.conf
|
||||
sudo cp /etc/nginx/sites-available/default /etc/nginx/sites-available/internal-p1-cgi
|
||||
sudo ln -s /etc/nginx/sites-available/internal-p1-cgi /etc/nginx/sites-enabled/
|
||||
sudo vim /etc/nginx/sites-available/internal-p1-cgi
|
||||
```
|
||||
and then set the right config file and port like
|
||||
|
||||
```nginx
|
||||
include fcgiwrap-p1.conf;
|
||||
listen 10001 ssl default_server; # ipv4
|
||||
listen [::]:10001 ssl http2 default_server; # ipv6
|
||||
```
|
||||
|
||||
|
||||
#### Networking
|
||||
Make sure the people responsible for doing the manual uploads
|
||||
can access the port where the CGI script can be called.
|
||||
|
||||
|
||||
### Setup internal CSAF provider
|
||||
|
||||
Now serve the written `html` directory via a webserver, but only
|
||||
internally. For nginx, you can follow the setup docs and for example
|
||||
limit the interfaces where it is listening in the `listen` directive.
|
||||
The following setting will only respond to requests
|
||||
on the loopback interface on port 10443 with TLS.
|
||||
|
||||
```nginx
|
||||
listen localhost:10443 ssl default_server;
|
||||
listen [::1]:10443 ssl default_server;
|
||||
root /var/www-p1/html;
|
||||
```
|
||||
|
||||
(Don't forget to reload nginx, so it gets the config change.)
|
||||
|
||||
|
||||
#### Networking
|
||||
Make sure the port can be reached by the server
|
||||
where the `csaf_aggregator` is started, but cannot be reached from
|
||||
an outside system.
|
||||
|
||||
This could be done by an ssh (or other VPN) tunnel.
|
||||
|
||||
|
||||
### Add to aggregator configuration
|
||||
|
||||
#### Networking
|
||||
Make sure that you have a local domain name that resolves
|
||||
to our internal provider host, but is fine to be exposed in public.
|
||||
As the domain name can be seen in the resulting `aggregator.json`.
|
||||
|
||||
One simple method to do this, is by using an entry in
|
||||
`/etc/hosts`:
|
||||
|
||||
```
|
||||
192.168.2.2 nein.ntvtn.de
|
||||
```
|
||||
|
||||
Consult your network admin for a secure setup.
|
||||
|
||||
|
||||
#### aggregator.toml
|
||||
Add a section to the aggregator configuration file,
|
||||
to it is used next time when `csaf_aggregator` does a full run, e.g.:
|
||||
|
||||
```toml
|
||||
[[providers]]
|
||||
name = "example-proxy-provider"
|
||||
domain = "https://nein.ntvtn.de:10443/.well-known/csaf/provider-metadata.json"
|
||||
```
|
||||
|
||||
Only makes sense if aggregator.category is set to `aggregator` (mirror mode).
|
||||
|
||||
Depending on how you do the "tunneling" you can add `insecure = true`
|
||||
to the section, if you are sure if nobody can mess with your internal DNS.
|
||||
This deactivates the checking of the root for the TLS certificate.
|
||||
Alternatively you can import the cert of the root CA for the internal
|
||||
provider to the system root certificate store, which `csaf_aggregator`
|
||||
is using.
|
||||
|
||||
|
||||
|
|
@ -1,9 +1,9 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# This file is Free Software under the MIT License
|
||||
# without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
# This file is Free Software under the Apache-2.0 License
|
||||
# without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
# SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
# Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -26,8 +26,10 @@ echo "
|
|||
|
||||
server_name ${DNS_NAME}; # e.g. server_name csaf.data.security.domain.tld;
|
||||
|
||||
location / {
|
||||
location = / {
|
||||
try_files /.well-known/csaf/provider-metadata.json =404;
|
||||
# allow others web applications to get the static information
|
||||
add_header Access-Control-Allow-Origin "*";
|
||||
}
|
||||
|
||||
access_log /var/log/nginx/dns-domain_access.log;
|
||||
|
|
|
|||
|
|
@ -1,23 +1,24 @@
|
|||
Scripts for assisting the Integration tests. They are written on Ubuntu 20.04 TLS amd64.
|
||||
Scripts for assisting the Integration tests.
|
||||
They were written on Ubuntu 20.04 LTS amd64 and also tested with 24.04 LTS.
|
||||
|
||||
- `prepareUbunutForITest.sh` installs the required packages for the csaf_distribution integration tests on a naked ubuntu 20.04 LTS amd64.
|
||||
- `prepareUbuntuInstanceForITests.sh` installs the required packages for the csaf integration tests on a naked Ubuntu LTS amd64.
|
||||
|
||||
- `TLSConfigsForITest.sh` generates a root CA and webserver cert by running `createRootCAForITest.sh` and `createWebserverCertForITest.sh`
|
||||
and configures nginx for serving TLS connections.
|
||||
|
||||
- `TLSClientConfigsForITest.sh` generates client certificates by calling `createCCForITest.sh` which uses the root certificate initialized before with `createRootCAForITest.sh`. It configures nginx to enable the authentication with client certificate. (This assumes that the same folder name is used to create the root certificate)
|
||||
|
||||
- `setupProviderForITest.sh` builds the csaf_provider, writes the required nginx configurations and create the initial folders. IT calls `uploadToProvider.sh` to upload some csaf example files to the provider.
|
||||
- `setupProviderForITest.sh` builds the `csaf_provider`, writes the required nginx configurations and create the initial folders. IT calls `uploadToProvider.sh` to upload some csaf example files to the provider.
|
||||
|
||||
As creating the folders needs to authenticate with the csaf_provider, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh`
|
||||
As creating the folders needs to authenticate with the `csaf_provider`, the configurations of TLS server and Client certificate authentication should be set. So it is recommended to call the scripts in this order: `TLSConfigsForITest.sh`, `TLSClientConfigsForITest.sh`, `setupProviderForITest.sh`
|
||||
|
||||
Calling example (as root):
|
||||
Calling example (as user with sudo privileges):
|
||||
``` bash
|
||||
curl --fail -O https://raw.githubusercontent.com/csaf-poc/csaf_distribution/main/docs/scripts/prepareUbuntuInstanceForITests.sh
|
||||
bash prepareUbuntuInstanceForITests.sh
|
||||
curl --fail -O https://raw.githubusercontent.com/gocsaf/csaf/main/docs/scripts/prepareUbuntuInstanceForITests.sh
|
||||
sudo bash prepareUbuntuInstanceForITests.sh
|
||||
|
||||
git clone https://github.com/csaf-poc/csaf_distribution.git
|
||||
pushd csaf_distribution/docs/scripts/
|
||||
git clone https://github.com/gocsaf/csaf.git # --branch <name>
|
||||
pushd csaf/docs/scripts/
|
||||
|
||||
export FOLDERNAME=devca1 ORGANAME="CSAF Tools Development (internal)"
|
||||
source ./TLSConfigsForITest.sh
|
||||
|
|
@ -25,4 +26,5 @@ Calling example (as root):
|
|||
./TLSClientConfigsForITest.sh
|
||||
./setupProviderForITest.sh
|
||||
./testAggregator.sh
|
||||
./testDownloader.sh
|
||||
```
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# This file is Free Software under the MIT License
|
||||
# without warranty, see README.md and LICENSES/MIT.txt for details.
|
||||
# This file is Free Software under the Apache-2.0 License
|
||||
# without warranty, see README.md and LICENSES/Apache-2.0.txt for details.
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
# SPDX-FileCopyrightText: 2022 German Federal Office for Information Security (BSI) <https://www.bsi.bund.de>
|
||||
# Software-Engineering: 2022 Intevation GmbH <https://intevation.de>
|
||||
|
|
@ -18,7 +18,7 @@ set -e
|
|||
|
||||
NGINX_CONFIG_PATH=/etc/nginx/sites-available/default
|
||||
|
||||
cd ~/csaf_distribution/docs/scripts/
|
||||
cd ~/csaf/docs/scripts/
|
||||
source ./createCCForITest.sh
|
||||
|
||||
echo '
|
||||
|
|
@ -33,9 +33,7 @@ echo '
|
|||
autoindex on;
|
||||
# in this location access is only allowed with client certs
|
||||
if ($ssl_client_verify != SUCCESS){
|
||||
# we use status code 404 == "Not Found", because we do not
|
||||
# want to reveal if this location exists or not.
|
||||
return 404;
|
||||
return 403;
|
||||
}
|
||||
}
|
||||
'> ~/${FOLDERNAME}/clientCertificateConfigs.txt
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue