
Commit 3c0854d

Merge remote-tracking branch 'origin/main' into openssl-advisories
2 parents a3ab905 + 584b077 commit 3c0854d

19 files changed, +301 -132 lines changed

.github/workflows/docs.yml

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@ on: [push, pull_request]
 
 jobs:
   build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
 
     strategy:
       max-parallel: 4

.github/workflows/main.yml

Lines changed: 1 addition & 1 deletion

@@ -9,7 +9,7 @@ env:
 
 jobs:
   build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
 
     services:
       postgres:

.github/workflows/pypi-release.yml

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@ on:
 jobs:
   build-pypi-distribs:
     name: Build and publish library to PyPI
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
 
     steps:
       - uses: actions/checkout@master

docs/source/conf.py

Lines changed: 2 additions & 0 deletions

@@ -36,6 +36,8 @@
     "https://www.softwaretestinghelp.com/how-to-write-good-bug-report/",  # Cloudflare protection
     "https://www.openssl.org/news/vulnerabilities.xml",  # OpenSSL legacy advisory URL, not longer available
     "https://example.org/api/non-existent-packages",
+    "https://github.com/aboutcode-org/vulnerablecode/pull/495/commits",
+    "https://nvd.nist.gov/products/cpe",
 ]
 
 # Add any Sphinx extension module names here, as strings. They can be
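
The two added URLs extend the list of links that the Sphinx linkcheck builder should skip. A minimal sketch of how this setting is typically used, assuming the list shown above is the standard `linkcheck_ignore` option (the variable name itself is not visible in this excerpt):

    # docs/source/conf.py -- minimal sketch, assuming the hunk above extends
    # Sphinx's linkcheck_ignore option; only the list items appear in the diff,
    # the surrounding variable name is an assumption.
    linkcheck_ignore = [
        "https://www.openssl.org/news/vulnerabilities.xml",  # legacy URL, gone
        "https://example.org/api/non-existent-packages",
        "https://github.com/aboutcode-org/vulnerablecode/pull/495/commits",
        "https://nvd.nist.gov/products/cpe",
    ]

With this in place, a linkcheck run (for example `sphinx-build -b linkcheck`) reports these URLs as ignored rather than broken.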

vulnerabilities/import_runner.py

Lines changed: 15 additions & 9 deletions

@@ -104,24 +104,30 @@ def process_advisories(
     advisories = []
     for data in advisory_datas:
         content_id = compute_content_id(advisory_data=data)
+        advisory = {
+            "summary": data.summary,
+            "affected_packages": [pkg.to_dict() for pkg in data.affected_packages],
+            "references": [ref.to_dict() for ref in data.references],
+            "date_published": data.date_published,
+            "weaknesses": data.weaknesses,
+            "created_by": importer_name,
+            "date_collected": datetime.datetime.now(tz=datetime.timezone.utc),
+        }
         try:
             aliases = get_or_create_aliases(aliases=data.aliases)
             obj, created = Advisory.objects.get_or_create(
                 unique_content_id=content_id,
                 url=data.url,
-                defaults={
-                    "summary": data.summary,
-                    "affected_packages": [pkg.to_dict() for pkg in data.affected_packages],
-                    "references": [ref.to_dict() for ref in data.references],
-                    "date_published": data.date_published,
-                    "weaknesses": data.weaknesses,
-                    "created_by": importer_name,
-                    "date_collected": datetime.datetime.now(tz=datetime.timezone.utc),
-                },
+                defaults=advisory,
             )
             obj.aliases.add(*aliases)
             if not obj.date_imported:
                 advisories.append(obj)
+        except Advisory.MultipleObjectsReturned:
+            logger.error(
+                f"Multiple Advisories returned: unique_content_id: {content_id}, url: {data.url}, advisory: {advisory!r}"
+            )
+            raise
        except Exception as e:
            logger.error(
                f"Error while processing {data!r} with aliases {data.aliases!r}: {e!r} \n {traceback_format_exc()}"
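
This refactor builds the `defaults` payload once as the local `advisory` dict so the same data can be reported when duplicates are found, and adds an explicit handler for `Advisory.MultipleObjectsReturned`, which `get_or_create` raises when the lookup fields match more than one existing row. A minimal sketch of that pattern, assuming a configured Django environment and the `Advisory` model from `vulnerabilities.models`; the helper name below is hypothetical and exists only to illustrate:

    # Minimal sketch, assuming Django settings are configured; upsert_advisory
    # is a hypothetical helper, not code from this commit.
    from vulnerabilities.models import Advisory

    def upsert_advisory(content_id, url, payload):
        # The lookup kwargs identify the row; "defaults" is applied only when a
        # new row is created, so an existing advisory is never overwritten here.
        try:
            return Advisory.objects.get_or_create(
                unique_content_id=content_id,
                url=url,
                defaults=payload,
            )
        except Advisory.MultipleObjectsReturned:
            # More than one row matched the lookup: duplicates that predate the
            # new uniqueness expectation. Re-raise so the caller can log and abort.
            raise
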
Lines changed: 31 additions & 0 deletions

@@ -0,0 +1,31 @@
+# Generated by Django 4.2.17 on 2025-04-04 16:08
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("vulnerabilities", "0090_migrate_advisory_aliases"),
+    ]
+
+    operations = [
+        migrations.AlterUniqueTogether(
+            name="advisory",
+            unique_together=set(),
+        ),
+        migrations.AlterField(
+            model_name="advisory",
+            name="unique_content_id",
+            field=models.CharField(
+                help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex",
+                max_length=64,
+                unique=True,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="advisory",
+            name="url",
+            field=models.URLField(help_text="Link to the advisory on the upstream website"),
+        ),
+    ]
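
This new migration (its file name is not shown in the excerpt) drops the old composite `unique_together` and enforces uniqueness on `unique_content_id` alone. Adding `unique=True` fails if duplicate values already exist, so a pre-flight check along these lines can help; this is an illustrative sketch to run from a Django shell, not code from the commit:

    # Illustrative pre-flight check (not part of this commit): list
    # unique_content_id values that occur more than once, since the AlterField
    # above cannot be applied while duplicates remain in the table.
    from django.db.models import Count

    from vulnerabilities.models import Advisory

    duplicates = (
        Advisory.objects.values("unique_content_id")
        .annotate(copies=Count("id"))
        .filter(copies__gt=1)
    )
    for row in duplicates:
        print(row["unique_content_id"], row["copies"])
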

vulnerabilities/models.py

Lines changed: 3 additions & 2 deletions

@@ -1321,6 +1321,7 @@ class Advisory(models.Model):
         max_length=64,
         blank=False,
         null=False,
+        unique=True,
         help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex",
     )
     aliases = models.ManyToManyField(

@@ -1355,14 +1356,14 @@
         "vulnerabilities.pipeline.nginx_importer.NginxImporterPipeline",
     )
     url = models.URLField(
-        blank=True,
+        blank=False,
+        null=False,
         help_text="Link to the advisory on the upstream website",
     )
 
     objects = AdvisoryQuerySet.as_manager()
 
     class Meta:
-        unique_together = ["unique_content_id", "date_published", "url"]
         ordering = ["date_published", "unique_content_id"]
 
     def save(self, *args, **kwargs):
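
The model now matches the migration: `unique_content_id` is unique on its own, `url` is required, and the composite `unique_together` is gone. The commit does not show how `compute_content_id` works, but the help text ("64 character ... sha256 as hex") suggests an identifier of roughly this shape; the sketch below is an assumption for illustration only:

    # Illustrative only: compute_content_id is not shown in this commit. This
    # sketch just demonstrates a 64-character sha256 hex digest over a
    # canonicalized advisory payload.
    import hashlib
    import json

    def sketch_content_id(advisory_dict):
        canonical = json.dumps(advisory_dict, sort_keys=True, default=str)
        return hashlib.sha256(canonical.encode("utf-8")).hexdigest()

    print(len(sketch_content_id({"summary": "example", "aliases": ["CVE-0000-0000"]})))  # 64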

vulnerabilities/pipelines/alpine_linux_importer.py

Lines changed: 16 additions & 10 deletions

@@ -195,7 +195,8 @@ def load_advisories(
                level=logging.DEBUG,
            )
        continue
-
+        # fixed_vulns is a list of strings and each string is a space-separated
+        # list of aliases and CVES
        for vuln_ids in fixed_vulns:
            if not isinstance(vuln_ids, str):
                if logger:

@@ -204,15 +205,16 @@ def load_advisories(
                        level=logging.DEBUG,
                    )
                continue
-            vuln_ids = vuln_ids.split()
-            aliases = []
-            vuln_id = vuln_ids[0]
-            # check for valid vuln ID, if there is valid vuln ID then iterate over
-            # the remaining elements of the list else iterate over the whole list
-            # and also check if the initial element is a reference or not
-            if is_cve(vuln_id):
-                aliases = [vuln_id]
-                vuln_ids = vuln_ids[1:]
+            vuln_ids = vuln_ids.strip().split()
+            if not vuln_ids:
+                if logger:
+                    logger(
+                        f"{vuln_ids!r} is empty",
+                        level=logging.DEBUG,
+                    )
+                continue
+            aliases = vuln_ids
+
            references = []
            for reference_id in vuln_ids:
 
@@ -225,6 +227,10 @@ def load_advisories(
                elif reference_id.startswith("wnpa-sec"):
                    references.append(WireSharkReference.from_id(wnpa_sec_id=reference_id))
 
+                elif not reference_id.startswith("CVE"):
+                    if logger:
+                        logger(f"Unknown reference id {reference_id!r}", level=logging.DEBUG)
+
            qualifiers = {
                "distroversion": distroversion,
                "reponame": reponame,

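The rewrite stops special-casing the first token: every whitespace-separated token in a fixed-vulnerabilities entry is now treated as an alias, empty or whitespace-only entries are skipped with a debug log, and tokens that fall through the known-prefix branches are logged as unknown reference ids. A standalone illustration of the new token handling; the sample values below are illustrative and not taken from the commit:

    # Standalone illustration of the new parsing behaviour; sample data only.
    def parse_fixed_vulns(fixed_vulns):
        parsed = []
        for vuln_ids in fixed_vulns:
            if not isinstance(vuln_ids, str):
                continue  # non-string entries are skipped, as before
            tokens = vuln_ids.strip().split()
            if not tokens:
                continue  # empty entries are now skipped instead of mis-parsed
            parsed.append(tokens)  # every token becomes an alias
        return parsed

    print(parse_fixed_vulns(["CVE-2021-3711 CVE-2021-3712", "   ", "XSA-379"]))
    # [['CVE-2021-3711', 'CVE-2021-3712'], ['XSA-379']]
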
vulnerabilities/pipes/advisory.py

Lines changed: 22 additions & 16 deletions

@@ -43,20 +43,27 @@ def insert_advisory(advisory: AdvisoryData, pipeline_id: str, logger: Callable =
     aliases = get_or_create_aliases(aliases=advisory.aliases)
     content_id = compute_content_id(advisory_data=advisory)
     try:
+        default_data = {
+            "summary": advisory.summary,
+            "affected_packages": [pkg.to_dict() for pkg in advisory.affected_packages],
+            "references": [ref.to_dict() for ref in advisory.references],
+            "date_published": advisory.date_published,
+            "weaknesses": advisory.weaknesses,
+            "created_by": pipeline_id,
+            "date_collected": datetime.now(timezone.utc),
+        }
+
         advisory_obj, _ = Advisory.objects.get_or_create(
             unique_content_id=content_id,
             url=advisory.url,
-            defaults={
-                "summary": advisory.summary,
-                "affected_packages": [pkg.to_dict() for pkg in advisory.affected_packages],
-                "references": [ref.to_dict() for ref in advisory.references],
-                "date_published": advisory.date_published,
-                "weaknesses": advisory.weaknesses,
-                "created_by": pipeline_id,
-                "date_collected": datetime.now(timezone.utc),
-            },
+            defaults=default_data,
         )
         advisory_obj.aliases.add(*aliases)
+    except Advisory.MultipleObjectsReturned:
+        logger.error(
+            f"Multiple Advisories returned: unique_content_id: {content_id}, url: {advisory.url}, advisory: {advisory!r}"
+        )
+        raise
     except Exception as e:
         if logger:
             logger(

@@ -137,19 +144,18 @@ def import_advisory(
                },
            )
            vulnerability.severities.add(vulnerability_severity)
+            if not created and logger:
+                logger(
+                    f"Severity updated for reference {ref.url!r} to value: {severity.value!r} "
+                    f"and scoring_elements: {severity.scoring_elements!r}",
+                    level=logging.DEBUG,
+                )
        except:
            if logger:
                logger(
                    f"Failed to create VulnerabilitySeverity for: {severity} with error:\n{traceback_format_exc()}",
                    level=logging.ERROR,
                )
-        if not created:
-            if logger:
-                logger(
-                    f"Severity updated for reference {ref.url!r} to value: {severity.value!r} "
-                    f"and scoring_elements: {severity.scoring_elements!r}",
-                    level=logging.DEBUG,
-                )
 
    for affected_purl in affected_purls or []:
        vulnerable_package, _ = Package.objects.get_or_create_from_purl(purl=affected_purl)
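
Two separate changes land here: `insert_advisory` gets the same defaults-dict refactor and `MultipleObjectsReturned` handler as `import_runner.py`, and in `import_advisory` the "Severity updated" debug log moves inside the `try` block, because `created` is only bound when the ORM call actually succeeded; in the old placement a failure would have hit a `NameError` on `created` after the bare `except` swallowed the original error. A minimal sketch of that control flow, assuming the hidden call above the hunk is `update_or_create` (the excerpt does not show it) and using a plain `logging` logger:

    # Minimal sketch of the reordered logging; update_or_create is an
    # assumption, since the call site is cut off above the hunk.
    import logging

    logger = logging.getLogger(__name__)

    def upsert_severity(model, lookup, defaults):
        try:
            obj, created = model.objects.update_or_create(defaults=defaults, **lookup)
            if not created:
                # "created" only exists if the call above succeeded, which is
                # why the log now lives inside the try block.
                logger.debug("Severity updated for %r", lookup)
            return obj
        except Exception:
            logger.error("Failed to create VulnerabilitySeverity for %r", lookup)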

vulnerabilities/tests/conftest.py

Lines changed: 0 additions & 1 deletion

@@ -25,7 +25,6 @@ def no_rmtree(monkeypatch):
 # Step 2: Run test for importer only if it is activated (pytestmark = pytest.mark.skipif(...))
 # Step 3: Migrate all the tests
 collect_ignore = [
-    "test_models.py",
     "test_rust.py",
     "test_suse_backports.py",
     "test_suse.py",

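`collect_ignore` is pytest's standard conftest variable listing paths to skip during test collection; removing "test_models.py" from the list re-enables that module. A minimal sketch of the mechanism:

    # conftest.py sketch: pytest skips collection of any path listed here,
    # relative to this conftest.py. Dropping an entry (as done above for
    # "test_models.py") makes those tests run again.
    collect_ignore = [
        "test_rust.py",
        "test_suse_backports.py",
    ]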