Skip to content

Commit 456a0b2

Browse files
committed
fix tests
1 parent 98ab7ae commit 456a0b2

16 files changed

Lines changed: 153 additions & 174 deletions

File tree

docs/content/en/connecting_your_tools/parsers/file/openvas.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,3 +15,9 @@ By default, DefectDojo identifies duplicate Findings using these [hashcode field
1515
- line
1616
- file path
1717
- description
18+
19+
### Parser V2 Changes
20+
Version 2 comes with multiple improvements:
21+
- Increased parsing consistency between the XML and CSV parsers
22+
- Combined findings where the only differences are in fields that can’t be rehashed due to inconsistent values between scans, e.g., fields with timestamps or packet IDs.
23+
- Parser now combines multiple identical findings with different endpoints into one finding with multiple endpoints (instead of multiple findings with one endpoint each)

docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md

Lines changed: 0 additions & 16 deletions
This file was deleted.

dojo/settings/settings.dist.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1348,7 +1348,7 @@ def saml2_attrib_map_format(din):
13481348
"Qualys Hacker Guardian Scan": ["title", "severity", "description"],
13491349
"Cyberwatch scan (Galeax)": ["title", "description", "severity"],
13501350
"Cycognito Scan": ["title", "severity"],
1351-
"OpenVAS Parser v2": ["title", "unique_id_from_tool", "vuln_id_from_tool"],
1351+
"OpenVAS Parser v2": ["title", "severity", "vuln_id_from_tool"],
13521352
}
13531353

13541354
# Override the hardcoded settings here via the env var

dojo/tools/factory.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,12 @@ def requires_tool_type(scan_type):
119119
module = import_module(f"dojo.tools.{module_name}.parser")
120120
for attribute_name in dir(module):
121121
attribute = getattr(module, attribute_name)
122-
if isclass(attribute) and attribute_name.lower() == module_name.replace("_", "") + "parser":
122+
# Allow parser class names with optional v[number] suffix (e.g., OpenVASParser, OpenVASParserV2)
123+
expected_base = module_name.replace("_", "") + "parser"
124+
if isclass(attribute) and (
125+
attribute_name.lower() == expected_base or
126+
re.match(rf"^{re.escape(expected_base)}v\d+$", attribute_name.lower())
127+
):
123128
register(attribute)
124129
except:
125130
logger.exception(f"failed to load {module_name}")

dojo/tools/openvas/parser.py

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
1-
from dojo.tools.openvas.csv_parser import OpenVASCSVParser
2-
from dojo.tools.openvas.xml_parser import OpenVASXMLParser
1+
from dojo.tools.openvas.parser_v1.csv_parser import OpenVASCSVParser
2+
from dojo.tools.openvas.parser_v1.xml_parser import OpenVASXMLParser
3+
from dojo.tools.openvas.parser_v2.csv_parser import OpenVASCSVParserV2
4+
from dojo.tools.openvas.parser_v2.xml_parser import OpenVASXMLParserV2
35

46

57
class OpenVASParser:
@@ -18,3 +20,21 @@ def get_findings(self, filename, test):
1820
if str(filename.name).endswith(".xml"):
1921
return OpenVASXMLParser().get_findings(filename, test)
2022
return None
23+
24+
25+
class OpenVASParserV2:
26+
def get_scan_types(self):
27+
return ["OpenVAS Parser v2"]
28+
29+
def get_label_for_scan_types(self, scan_type):
30+
return scan_type
31+
32+
def get_description_for_scan_types(self, scan_type):
33+
return "Import CSV or XML output of Greenbone OpenVAS report."
34+
35+
def get_findings(self, filename, test):
36+
if str(filename.name).endswith(".csv"):
37+
return OpenVASCSVParserV2().get_findings(filename, test)
38+
if str(filename.name).endswith(".xml"):
39+
return OpenVASXMLParserV2().get_findings(filename, test)
40+
return None

dojo/tools/openvas/parser_v1/__init__.py

Whitespace-only changes.
File renamed without changes.
File renamed without changes.

dojo/tools/openvas/parser_v2/__init__.py

Whitespace-only changes.
Lines changed: 4 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,6 @@ def update_finding(finding: Finding, aux_info: OpenVASFindingAuxData):
3636
def deduplicate(dupes: dict[str, Finding], finding: Finding):
3737
"""Combine multiple openvas findings into one defectdojo finding with multiple endpoints"""
3838
finding_hash = dedup_finding_hash(finding)
39-
# deliberately missuse unique_id_from_tool to save some original values
40-
finding.unique_id_from_tool = id_from_tool_finding_hash(finding)
4139

4240
if finding_hash not in dupes:
4341
dupes[finding_hash] = finding
@@ -70,28 +68,13 @@ def deduplicate(dupes: dict[str, Finding], finding: Finding):
7068
def id_from_tool_finding_hash(finding: Finding):
7169
"""Generate a hash that complements final hash generating outside of this parser"""
7270
endpoint = finding.unsaved_endpoints[0]
73-
hash_data = [
74-
str(endpoint.protocol),
75-
str(endpoint.userinfo),
76-
str(endpoint.port), # keep findings on different port seperate as it may be different applications
77-
str(endpoint.path),
78-
str(endpoint.fragment),
79-
finding.severity, # allows changing severity of finding after import
80-
]
81-
return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest()
8271

72+
if "endpoints" in HASHCODE_FIELDS_PER_SCANNER["OpenVAS Parser v2"]:
73+
pass
8374

84-
def dedup_finding_hash(finding: Finding):
85-
"""Generate a hash for a finding that is used for deduplication of findings inside the current report"""
86-
endpoint = finding.unsaved_endpoints[0]
8775
hash_data = [
8876
str(endpoint.protocol),
89-
str(endpoint.userinfo),
90-
str(endpoint.port),
91-
str(endpoint.path),
92-
str(endpoint.fragment),
93-
finding.title,
94-
finding.vuln_id_from_tool,
95-
finding.severity,
77+
str(endpoint.port), # keep findings on different ports separate as they may be different applications
78+
finding.severity, # allows changing severity of finding after import
9679
]
9780
return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest()

0 commit comments

Comments
 (0)