From b8fcfb1da9aa72e17efe28f74d0f4ede307d3b7e Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Tue, 16 Sep 2025 18:04:45 +0200 Subject: [PATCH 01/15] fixed merge conflict --- dojo/settings/settings.dist.py | 1 + dojo/tools/openvas/common.py | 52 +++ dojo/tools/openvas/csv_parser.py | 392 ++++-------------- dojo/tools/openvas/xml_parser.py | 107 +++-- ...port_using_openVAS_findings_to_combine.csv | 57 +++ unittests/tools/test_openvas_parser.py | 14 +- 6 files changed, 259 insertions(+), 364 deletions(-) create mode 100644 dojo/tools/openvas/common.py create mode 100644 unittests/scans/openvas/report_using_openVAS_findings_to_combine.csv diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 76a82c39c28..db43dad1f9e 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1613,6 +1613,7 @@ def saml2_attrib_map_format(din): "Red Hat Satellite": DEDUPE_ALGO_HASH_CODE, "Qualys Hacker Guardian Scan": DEDUPE_ALGO_HASH_CODE, "Cyberwatch scan (Galeax)": DEDUPE_ALGO_HASH_CODE, + "OpenVAS Parser": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, } # Override the hardcoded settings here via the env var diff --git a/dojo/tools/openvas/common.py b/dojo/tools/openvas/common.py new file mode 100644 index 00000000000..3b91d33f37c --- /dev/null +++ b/dojo/tools/openvas/common.py @@ -0,0 +1,52 @@ +import hashlib +import os +from dataclasses import dataclass + +from dojo.models import Finding + +OPENVAS_SEVERITY_OVERWRITE = os.environ.get("OPENVAS_SEVERITY_OVERWRITE", "False").lower() in {"true", 1} + + +@dataclass +class OpenVASFindingAuxData: + + """Dataclass to contain all information added later to description""" + + qod: str = "" + + +def is_valid_severity(severity): + valid_severity = ("Info", "Low", "Medium", "High", "Critical") + return severity in valid_severity + + +def update_description(finding: Finding, aux_info: OpenVASFindingAuxData): + finding.description += f"\n**QoD**: {aux_info.qod}" + + 
+def deduplicate(dupes: dict[str, Finding], finding: Finding): + key = generate_openvas_finding_hash(finding) + # set for use in global deduplication + finding.unique_id_from_tool = key + + if key not in dupes: + dupes[key] = finding + else: + # openvas does not combine findings of the same type + # that are listed as multiple findings on the same host e.g + # a vunerability in the java runtime may be reported 2 times + # if 2 vulnerable java runtimes are present on the host + # and the only way do differantiate this findings are the specific results (mapped to references by the parser) + # but we cannot hash this field as this field can contain data that changes between scans e.g timestamps + # we therfore combine them because duplicates during reimport cause + # https://github.com/DefectDojo/django-DefectDojo/issues/3958 + org = dupes[key] + if org.references != finding.references: + org.references += "\n---------------------------------------\n" + org.references += finding.references + + +def generate_openvas_finding_hash(finding: Finding): + """Generate a hash for a finding that is used for deduplication of findings inside the current report""" + hash_data = [str(finding.unsaved_endpoints[0]), finding.title, finding.description, finding.severity] + return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest() diff --git a/dojo/tools/openvas/csv_parser.py b/dojo/tools/openvas/csv_parser.py index 1a87d17b73e..d5f18b9a5ab 100644 --- a/dojo/tools/openvas/csv_parser.py +++ b/dojo/tools/openvas/csv_parser.py @@ -1,327 +1,107 @@ import csv -import hashlib import io import re from dateutil.parser import parse from dojo.models import Endpoint, Finding +from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description -class ColumnMappingStrategy: - mapped_column = None - - def __init__(self): - self.successor = None - - def map_column_value(self, finding, column_value): - pass - - @staticmethod - def 
evaluate_bool_value(column_value): - if column_value.lower() == "true": - return True - if column_value.lower() == "false": - return False - return None - - def process_column(self, column_name, column_value, finding): - if ( - column_name.lower() == self.mapped_column - and column_value is not None - ): - self.map_column_value(finding, column_value) - elif self.successor is not None: - self.successor.process_column(column_name, column_value, finding) - - -class DateColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "timestamp" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.date = parse(column_value).date() - - -class TitleColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "nvt name" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.title = column_value - - -class CweColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "cweid" - super().__init__() - - def map_column_value(self, finding, column_value): - if column_value.isdigit(): - finding.cwe = int(column_value) - - -class PortColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "port" - super().__init__() - - def map_column_value(self, finding, column_value): - if column_value.isdigit(): - finding.unsaved_endpoints[0].port = int(column_value) - - -class CveColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "cves" - super().__init__() - - def map_column_value(self, finding, column_value): - if column_value: - if "," in column_value: - finding.description += "\n**All CVEs:** " + str(column_value) - for value in column_value.split(","): - finding.unsaved_vulnerability_ids.append(value) - else: - finding.unsaved_vulnerability_ids.append(column_value) - - -class NVDCVEColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - 
self.mapped_column = "nvt oid" - super().__init__() - - def map_column_value(self, finding, column_value): - cve_pattern = r"CVE-\d{4}-\d{4,7}" - cves = re.findall(cve_pattern, column_value) - for cve in cves: - finding.unsaved_vulnerability_ids.append(cve) - - -class ProtocolColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "port protocol" - super().__init__() - - def map_column_value(self, finding, column_value): - if column_value: # do not store empty protocol - finding.unsaved_endpoints[0].protocol = column_value - - -class IpColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "ip" - super().__init__() - - def map_column_value(self, finding, column_value): - if not finding.unsaved_endpoints[ - 0 - ].host and column_value is not None: # process only if host is not already defined (by field hostname) - # strip due to https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = column_value.strip() - - -class HostnameColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "hostname" - super().__init__() - - def map_column_value(self, finding, column_value): - if column_value: # do not override IP if hostname is empty - # strip due to https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = column_value.strip() - - -class SeverityColumnMappingStrategy(ColumnMappingStrategy): - @staticmethod - def is_valid_severity(severity): - valid_severity = ("Info", "Low", "Medium", "High", "Critical") - return severity in valid_severity - - def __init__(self): - self.mapped_column = "severity" - super().__init__() - - def map_column_value(self, finding, column_value): - if self.is_valid_severity(column_value): - finding.severity = column_value - else: - finding.severity = "Info" - - -class CvssColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "cvss" - super().__init__() - - 
def map_column_value(self, finding, column_value): - # skip empty values - if not column_value: - return - finding.cvssv3_score = float(column_value) - - -class DescriptionColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "summary" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.description = column_value - - -class MitigationColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "solution" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.mitigation = column_value - - -class ImpactColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "vulnerability insight" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.impact = column_value - - -class ReferencesColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "specific result" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.references = column_value - - -class ActiveColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "active" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.active = self.evaluate_bool_value(column_value) - - -class VerifiedColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "verified" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.verified = self.evaluate_bool_value(column_value) - - -class FalsePositiveColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "falsepositive" - super().__init__() - - def map_column_value(self, finding, column_value): - finding.false_p = self.evaluate_bool_value(column_value) - - -class DuplicateColumnMappingStrategy(ColumnMappingStrategy): - def __init__(self): - self.mapped_column = "duplicate" 
- super().__init__() - - def map_column_value(self, finding, column_value): - finding.duplicate = self.evaluate_bool_value(column_value) - +def evaluate_bool_value(column_value): + if column_value.lower() == "true": + return True + if column_value.lower() == "false": + return False + return None class OpenVASCSVParser: - def create_chain(self): - date_column_strategy = DateColumnMappingStrategy() - title_column_strategy = TitleColumnMappingStrategy() - cwe_column_strategy = CweColumnMappingStrategy() - ip_column_strategy = IpColumnMappingStrategy() - hostname_column_strategy = HostnameColumnMappingStrategy() - severity_column_strategy = SeverityColumnMappingStrategy() - cvss_score_column_strategy = CvssColumnMappingStrategy() - description_column_strategy = DescriptionColumnMappingStrategy() - mitigation_column_strategy = MitigationColumnMappingStrategy() - impact_column_strategy = ImpactColumnMappingStrategy() - references_column_strategy = ReferencesColumnMappingStrategy() - active_column_strategy = ActiveColumnMappingStrategy() - verified_column_strategy = VerifiedColumnMappingStrategy() - false_positive_strategy = FalsePositiveColumnMappingStrategy() - duplicate_strategy = DuplicateColumnMappingStrategy() - port_strategy = PortColumnMappingStrategy() - protocol_strategy = ProtocolColumnMappingStrategy() - cve_column_strategy = CveColumnMappingStrategy() - nvd_cve_column_strategy = NVDCVEColumnMappingStrategy() - port_strategy.successor = protocol_strategy - duplicate_strategy.successor = port_strategy - false_positive_strategy.successor = duplicate_strategy - verified_column_strategy.successor = false_positive_strategy - active_column_strategy.successor = verified_column_strategy - references_column_strategy.successor = active_column_strategy - impact_column_strategy.successor = references_column_strategy - mitigation_column_strategy.successor = impact_column_strategy - description_column_strategy.successor = mitigation_column_strategy - 
cvss_score_column_strategy.successor = description_column_strategy - severity_column_strategy.successor = cvss_score_column_strategy - ip_column_strategy.successor = severity_column_strategy - hostname_column_strategy.successor = ip_column_strategy - cwe_column_strategy.successor = hostname_column_strategy - title_column_strategy.successor = cwe_column_strategy - cve_column_strategy.successor = title_column_strategy - nvd_cve_column_strategy.successor = cve_column_strategy - date_column_strategy.successor = nvd_cve_column_strategy - return date_column_strategy - - def read_column_names(self, row): - return dict(enumerate(row)) - def get_findings(self, filename, test): - column_names = {} dupes = {} - chain = self.create_chain() content = filename.read() if isinstance(content, bytes): content = content.decode("utf-8") - reader = csv.reader(io.StringIO(content), delimiter=",", quotechar='"') - for row_number, row in enumerate(reader): - finding = Finding(test=test) + + csv_reader = csv.reader(io.StringIO(content), delimiter=",", quotechar='"') + column_names = [column_name.lower() for column_name in next(csv_reader) if column_name] + + for row in csv_reader: + finding = Finding(test=test, dynamic_finding=True, static_finding=False, severity="Info") finding.unsaved_vulnerability_ids = [] finding.unsaved_endpoints = [Endpoint()] - ip = None - if row_number == 0: - column_names = self.read_column_names(row) - continue - for column_number, column in enumerate(row): - chain.process_column( - column_names[column_number], column, finding, - ) - # due to the way this parser is implemented we have to do this stuff to retrieve a value for later use - if column_names[column_number].lower() == "ip": - ip = column + aux_info = OpenVASFindingAuxData() + + for value, name in zip(row, column_names, strict=False): + self.process_column_element(value, name, finding, aux_info) - if ip: - finding.description += f"\n**IP**: {ip}" + update_description(finding, aux_info) + 
deduplicate(dupes, finding) - if finding is not None and row_number > 0: - if finding.title is None: - finding.title = "" - if finding.description is None: - finding.description = "" - key = hashlib.sha256( - ( - str(finding.unsaved_endpoints[0]) - + "|" - + finding.severity - + "|" - + finding.title - + "|" - + finding.description - ).encode("utf-8"), - ).hexdigest() - if key not in dupes: - dupes[key] = finding return list(dupes.values()) + + def process_column_element( + self, + column_value: str, + column_name: str, + finding: Finding, + aux_info: OpenVASFindingAuxData, + ): + # skip columns with empty values + if not column_value: + return + + # process column names + if column_name == "nvt name": + finding.title = column_value + elif column_name == "cweid": + if column_value.isdigit(): + finding.cwe = int(column_value) + elif column_name == "cves": + for cve in column_value.split(","): + finding.unsaved_vulnerability_ids.append(cve) + elif column_name == "nvt oid": + cve_pattern = r"CVE-\d{4}-\d{4,7}" # legacy import + cves = re.findall(cve_pattern, column_value) + for cve in cves: + finding.unsaved_vulnerability_ids.append(cve) + if len(cves) == 0: + finding.script_id = column_value + elif column_name == "hostname": + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = column_value.strip() + elif column_name == "ip": + # fallback to ip if hostname is not aviable + if not finding.unsaved_endpoints[0].host: + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = column_value.strip() + elif column_name == "port": + if column_value.isdigit(): + finding.unsaved_endpoints[0].port = int(column_value) + elif column_name == "port protocol": + finding.unsaved_endpoints[0].protocol = column_value + elif column_name == "severity": + if is_valid_severity(column_value): + finding.severity = column_value + elif column_name == "cvss": + finding.cvssv3_score = float(column_value) + 
elif column_name == "summary": + finding.description = column_value + elif column_name == "solution": + finding.mitigation = column_value + elif column_name == "vulnerability insight": + finding.impact = column_value + elif column_name == "specific result": + finding.references = column_value + elif column_name == "qod": + aux_info.qod = column_value + # columns not part of default openvas csv export + elif column_name == "active": + finding.active = evaluate_bool_value(column_value) + elif column_name == "verified": + finding.verified = evaluate_bool_value(column_value) + elif column_name == "falsepositive": + finding.false_p = evaluate_bool_value(column_value) + elif column_name == "duplicate": + finding.duplicate = evaluate_bool_value(column_value) + elif column_name == "timestamp": + finding.date = parse(column_value).date() diff --git a/dojo/tools/openvas/xml_parser.py b/dojo/tools/openvas/xml_parser.py index ed8b3c25185..69dffb2c8c0 100644 --- a/dojo/tools/openvas/xml_parser.py +++ b/dojo/tools/openvas/xml_parser.py @@ -4,77 +4,70 @@ from defusedxml import ElementTree from dojo.models import Endpoint, Finding +from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description class OpenVASXMLParser: def get_findings(self, filename, test): - findings = [] + dupes = {} tree = ElementTree.parse(filename) root = tree.getroot() + if "report" not in root.tag: - msg = "This doesn't seem to be a valid Greenbone OpenVAS XML file." + msg = "This doesn't seem to be a valid Greenbone/ OpenVAS XML file." 
raise NamespaceErr(msg) report = root.find("report") results = report.find("results") - for result in results: - script_id = None - unsaved_endpoint = Endpoint() - for field in result: - if field.tag == "name": - title = field.text - description = [f"**Name**: {field.text}"] - if field.tag == "hostname": - title = title + "_" + field.text - description.append(f"**Hostname**: {field.text}") - if field.text: - unsaved_endpoint.host = field.text.strip() # strip due to https://github.com/greenbone/gvmd/issues/2378 - if field.tag == "host": - title = title + "_" + field.text - description.append(f"**Host**: {field.text}") - if not unsaved_endpoint.host and field.text: - unsaved_endpoint.host = field.text.strip() # strip due to https://github.com/greenbone/gvmd/issues/2378 - if field.tag == "port": - title = title + "_" + field.text - description.append(f"**Port**: {field.text}") - if field.text: - port_str, protocol = field.text.split("/") - with contextlib.suppress(ValueError): - unsaved_endpoint.port = int(port_str) - unsaved_endpoint.protocol = protocol - if field.tag == "nvt": - description.append(f"**NVT**: {field.text}") - script_id = field.get("oid") or field.text - if field.tag == "severity": - description.append(f"**Severity**: {field.text}") - if field.tag == "threat": - description.append(f"**Threat**: {field.text}") - severity = field.text if field.text in {"Info", "Low", "Medium", "High", "Critical"} else "Info" - if field.tag == "qod": - description.append(f"**QOD**: {field.text}") - if field.tag == "description": - description.append(f"**Description**: {field.text}") + for result in results: finding = Finding( - title=str(title), test=test, - description="\n".join(description), - severity=severity, dynamic_finding=True, static_finding=False, - vuln_id_from_tool=script_id, + severity="Info", ) - finding.unsaved_endpoints = [unsaved_endpoint] - findings.append(finding) - return findings + aux_info = OpenVASFindingAuxData() + + 
finding.unsaved_vulnerability_ids = [] + finding.unsaved_endpoints = [Endpoint()] + + for field in result: + self.process_field_element(field, finding, aux_info) + + update_description(finding, aux_info) + deduplicate(dupes, finding) + + return list(dupes.values()) + + def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindingAuxData): + if not field.text: + return - def convert_cvss_score(self, raw_value): - val = float(raw_value) - if val == 0.0: - return "Info" - if val < 4.0: - return "Low" - if val < 7.0: - return "Medium" - if val < 9.0: - return "High" - return "Critical" + if field.tag == "name": + finding.title = field.text + elif field.tag == "nvt": + finding.script_id = field.get("oid") + nvt_name = field.find("name").text + if nvt_name: + finding.title = nvt_name + elif field.tag == "hostname": + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = field.text.strip() + elif field.tag == "host": + if not finding.unsaved_endpoints[0].host: + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = field.text.strip() + elif field.tag == "port": + port_str, protocol = field.text.split("/") + with contextlib.suppress(ValueError): + finding.unsaved_endpoints[0].port = int(port_str) + finding.unsaved_endpoints[0].protocol = protocol + elif field.tag == "severity": + finding.cvssv3_score = float(field.text) + elif field.tag == "threat": + if is_valid_severity(field.text): + finding.severity = field.text + elif field.tag == "qod": + aux_info.qod = field.text + elif field.tag == "description": + finding.description = field.text diff --git a/unittests/scans/openvas/report_using_openVAS_findings_to_combine.csv b/unittests/scans/openvas/report_using_openVAS_findings_to_combine.csv new file mode 100644 index 00000000000..3ae134cd638 --- /dev/null +++ b/unittests/scans/openvas/report_using_openVAS_findings_to_combine.csv @@ -0,0 +1,57 @@ 
+IP,Hostname,Port,Port Protocol,CVSS,Severity,QoD,Solution Type,NVT Name,Summary,Specific Result,NVT OID,CVEs,Task ID,Task Name,Timestamp,Result ID,Impact,Solution,Affected Software/OS,Vulnerability Insight,Vulnerability Detection Method,Product Detection Result,BIDs,CERTs,Other References +45.33.32.156,,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 1912048204 +Packet 2: 1912049513 +",1.3.6.1.4.1.25623.1.0.80091,"",ef9cd713-0144-4fe5-a19d-6849983ae3d1,"ScanmeNmap",2024-03-18T12:46:31Z,167b0841-3f29-450b-bb44-a7d88999b3bc,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T16:10:08Z +","","","","" +45.33.32.156,,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 1912048205 +Packet 2: 1912049516 +",1.3.6.1.4.1.25623.1.0.80091,"",ef9cd713-0144-4fe5-a19d-6849983ae3d1,"ScanmeNmap",2024-03-18T12:46:31Z,167b0841-3f29-450b-bb44-a7d88999b3bc,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T16:10:08Z +","","","","" diff --git a/unittests/tools/test_openvas_parser.py b/unittests/tools/test_openvas_parser.py index 7ec8cf7ebf2..4d383d544a7 100644 --- a/unittests/tools/test_openvas_parser.py +++ b/unittests/tools/test_openvas_parser.py @@ -83,6 +83,18 @@ def test_openvas_csv_report_usingOpenVAS(self): self.assertEqual("Info", finding.severity) self.assertEqual(finding.unsaved_vulnerability_ids, []) + def test_openvas_csv_report_combined_findings(self): + with (get_unit_tests_scans_path("openvas") / "report_using_openVAS_findings_to_combine.csv").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(1, len(findings)) + def test_openvas_xml_no_vuln(self): with (get_unit_tests_scans_path("openvas") / "no_vuln.xml").open(encoding="utf-8") as f: test = Test() @@ -105,7 +117,7 @@ def test_openvas_xml_one_vuln(self): self.assertEqual(1, len(findings)) with self.subTest(i=0): finding = findings[0] - self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows_10.0.101.2_general/tcp", finding.title) + self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows", finding.title) self.assertEqual("High", finding.severity) def test_openvas_xml_many_vuln(self): From 4644a997c4dd0443efa6ed1e60e604aa5abdff1c Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Fri, 15 Aug 2025 18:36:57 +0200 Subject: [PATCH 02/15] xml parser now parses more information --- dojo/tools/openvas/common.py | 3 ++- dojo/tools/openvas/csv_parser.py | 5 ++-- dojo/tools/openvas/xml_parser.py | 45 ++++++++++++++++++++++++++------ 3 files changed, 42 insertions(+), 11 
deletions(-) diff --git a/dojo/tools/openvas/common.py b/dojo/tools/openvas/common.py index 3b91d33f37c..9a0289c0d73 100644 --- a/dojo/tools/openvas/common.py +++ b/dojo/tools/openvas/common.py @@ -21,7 +21,8 @@ def is_valid_severity(severity): def update_description(finding: Finding, aux_info: OpenVASFindingAuxData): - finding.description += f"\n**QoD**: {aux_info.qod}" + if aux_info.qod: + finding.description += f"\n**QoD**: {aux_info.qod}" def deduplicate(dupes: dict[str, Finding], finding: Finding): diff --git a/dojo/tools/openvas/csv_parser.py b/dojo/tools/openvas/csv_parser.py index d5f18b9a5ab..707e27bb97f 100644 --- a/dojo/tools/openvas/csv_parser.py +++ b/dojo/tools/openvas/csv_parser.py @@ -9,9 +9,10 @@ def evaluate_bool_value(column_value): - if column_value.lower() == "true": + value = column_value.lower() + if value == "true": return True - if column_value.lower() == "false": + if value == "false": return False return None diff --git a/dojo/tools/openvas/xml_parser.py b/dojo/tools/openvas/xml_parser.py index 69dffb2c8c0..0799b41bd66 100644 --- a/dojo/tools/openvas/xml_parser.py +++ b/dojo/tools/openvas/xml_parser.py @@ -16,6 +16,7 @@ def get_findings(self, filename, test): if "report" not in root.tag: msg = "This doesn't seem to be a valid Greenbone/ OpenVAS XML file." 
raise NamespaceErr(msg) + report = root.find("report") results = report.find("results") @@ -39,17 +40,45 @@ def get_findings(self, filename, test): return list(dupes.values()) + def parse_nvt_tags(self, text): + parts = text.strip().split("|") + tags = {} + + for part in parts: + idx = part.find("=") + if idx == -1 or (len(part) < idx + 2): + continue + + key = part[0:idx] + val = part[idx + 1:] + tags[key] = val + return tags + def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindingAuxData): + if field.tag == "nvt": + finding.script_id = field.get("oid") + nvt_name = field.find("name").text + if nvt_name: + finding.title = nvt_name + + # parse tags field + tag_field = field.find("tags") + tags = self.parse_nvt_tags(tag_field.text) + summary = tags.get("summary", None) + if summary: + finding.description = summary + + impact = tags.get("impact", None) + if impact: + finding.impact = impact + elif field.tag == "qod": + aux_info.qod = field.find("value").text + if not field.text: return if field.tag == "name": finding.title = field.text - elif field.tag == "nvt": - finding.script_id = field.get("oid") - nvt_name = field.find("name").text - if nvt_name: - finding.title = nvt_name elif field.tag == "hostname": # strip due to https://github.com/greenbone/gvmd/issues/2378 finding.unsaved_endpoints[0].host = field.text.strip() @@ -67,7 +96,7 @@ def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindin elif field.tag == "threat": if is_valid_severity(field.text): finding.severity = field.text - elif field.tag == "qod": - aux_info.qod = field.text elif field.tag == "description": - finding.description = field.text + finding.references = field.text.strip() + elif field.tag == "solution": + finding.mitigation = field.text From 20c21e21bbfe4180bdf0192d77e6d2201e11947e Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Fri, 15 Aug 2025 20:12:00 +0200 Subject: [PATCH 03/15] improved finding 
combination --- dojo/tools/openvas/common.py | 35 +++++++++++++++++++++++++---------- 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/dojo/tools/openvas/common.py b/dojo/tools/openvas/common.py index 9a0289c0d73..98ed70e413a 100644 --- a/dojo/tools/openvas/common.py +++ b/dojo/tools/openvas/common.py @@ -9,7 +9,6 @@ @dataclass class OpenVASFindingAuxData: - """Dataclass to contain all information added later to description""" qod: str = "" @@ -26,28 +25,44 @@ def update_description(finding: Finding, aux_info: OpenVASFindingAuxData): def deduplicate(dupes: dict[str, Finding], finding: Finding): - key = generate_openvas_finding_hash(finding) + """Combine multiple openvas findings into one defectdojo finding with multiple endpoints""" + finding_hash = generate_openvas_finding_hash(finding) # set for use in global deduplication - finding.unique_id_from_tool = key + finding.unique_id_from_tool = finding_hash - if key not in dupes: - dupes[key] = finding + if finding_hash not in dupes: + dupes[finding_hash] = finding else: - # openvas does not combine findings of the same type - # that are listed as multiple findings on the same host e.g + # openvas does not combine multiple findings into one e.g # a vunerability in the java runtime may be reported 2 times # if 2 vulnerable java runtimes are present on the host # and the only way do differantiate this findings are the specific results (mapped to references by the parser) - # but we cannot hash this field as this field can contain data that changes between scans e.g timestamps + # but we cannot hash this field as it can contain data that changes between scans e.g timestamps # we therfore combine them because duplicates during reimport cause # https://github.com/DefectDojo/django-DefectDojo/issues/3958 - org = dupes[key] + org = dupes[finding_hash] if org.references != finding.references: org.references += "\n---------------------------------------\n" + org.references += f"**Endpoint**: 
{finding.unsaved_endpoints[0].host}\n" org.references += finding.references + # combine identical findings on different hosts into one with multiple hosts + endpoint = finding.unsaved_endpoints[0] + if endpoint not in org.unsaved_endpoints: + org.unsaved_endpoints += finding.unsaved_endpoints + def generate_openvas_finding_hash(finding: Finding): """Generate a hash for a finding that is used for deduplication of findings inside the current report""" - hash_data = [str(finding.unsaved_endpoints[0]), finding.title, finding.description, finding.severity] + endpoint = finding.unsaved_endpoints[0] + hash_data = [ + str(endpoint.protocol), + str(endpoint.userinfo), + str(endpoint.port), + str(endpoint.path), + str(endpoint.fragment), + finding.title, + finding.description, + finding.severity, + ] return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest() From 065649513679dad53e411fbefb8dcff8a371aae5 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Fri, 15 Aug 2025 20:14:02 +0200 Subject: [PATCH 04/15] fix lint --- dojo/tools/openvas/common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/dojo/tools/openvas/common.py b/dojo/tools/openvas/common.py index 98ed70e413a..dd8d07b1ff5 100644 --- a/dojo/tools/openvas/common.py +++ b/dojo/tools/openvas/common.py @@ -9,6 +9,7 @@ @dataclass class OpenVASFindingAuxData: + """Dataclass to contain all information added later to description""" qod: str = "" From f988daed63a19da45e57cb9a7a9d5d9d64eb44dc Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:45:38 +0200 Subject: [PATCH 05/15] moved changes to v2 version --- dojo/tools/openvas_v2/__init__.py | 1 + dojo/tools/openvas_v2/common.py | 69 ++++++++++++++++++ dojo/tools/openvas_v2/csv_parser.py | 109 ++++++++++++++++++++++++++++ dojo/tools/openvas_v2/parser.py | 20 +++++ dojo/tools/openvas_v2/xml_parser.py | 102 ++++++++++++++++++++++++++ 5 files changed, 301 
insertions(+) create mode 100644 dojo/tools/openvas_v2/__init__.py create mode 100644 dojo/tools/openvas_v2/common.py create mode 100644 dojo/tools/openvas_v2/csv_parser.py create mode 100644 dojo/tools/openvas_v2/parser.py create mode 100644 dojo/tools/openvas_v2/xml_parser.py diff --git a/dojo/tools/openvas_v2/__init__.py b/dojo/tools/openvas_v2/__init__.py new file mode 100644 index 00000000000..3ad798a42b3 --- /dev/null +++ b/dojo/tools/openvas_v2/__init__.py @@ -0,0 +1 @@ +__author__ = "manuel-sommer" diff --git a/dojo/tools/openvas_v2/common.py b/dojo/tools/openvas_v2/common.py new file mode 100644 index 00000000000..dd8d07b1ff5 --- /dev/null +++ b/dojo/tools/openvas_v2/common.py @@ -0,0 +1,69 @@ +import hashlib +import os +from dataclasses import dataclass + +from dojo.models import Finding + +OPENVAS_SEVERITY_OVERWRITE = os.environ.get("OPENVAS_SEVERITY_OVERWRITE", "False").lower() in {"true", 1} + + +@dataclass +class OpenVASFindingAuxData: + + """Dataclass to contain all information added later to description""" + + qod: str = "" + + +def is_valid_severity(severity): + valid_severity = ("Info", "Low", "Medium", "High", "Critical") + return severity in valid_severity + + +def update_description(finding: Finding, aux_info: OpenVASFindingAuxData): + if aux_info.qod: + finding.description += f"\n**QoD**: {aux_info.qod}" + + +def deduplicate(dupes: dict[str, Finding], finding: Finding): + """Combine multiple openvas findings into one defectdojo finding with multiple endpoints""" + finding_hash = generate_openvas_finding_hash(finding) + # set for use in global deduplication + finding.unique_id_from_tool = finding_hash + + if finding_hash not in dupes: + dupes[finding_hash] = finding + else: + # openvas does not combine multiple findings into one e.g + # a vunerability in the java runtime may be reported 2 times + # if 2 vulnerable java runtimes are present on the host + # and the only way do differantiate this findings are the specific results (mapped to 
references by the parser) + # but we cannot hash this field as it can contain data that changes between scans e.g timestamps + # we therfore combine them because duplicates during reimport cause + # https://github.com/DefectDojo/django-DefectDojo/issues/3958 + org = dupes[finding_hash] + if org.references != finding.references: + org.references += "\n---------------------------------------\n" + org.references += f"**Endpoint**: {finding.unsaved_endpoints[0].host}\n" + org.references += finding.references + + # combine identical findings on different hosts into one with multiple hosts + endpoint = finding.unsaved_endpoints[0] + if endpoint not in org.unsaved_endpoints: + org.unsaved_endpoints += finding.unsaved_endpoints + + +def generate_openvas_finding_hash(finding: Finding): + """Generate a hash for a finding that is used for deduplication of findings inside the current report""" + endpoint = finding.unsaved_endpoints[0] + hash_data = [ + str(endpoint.protocol), + str(endpoint.userinfo), + str(endpoint.port), + str(endpoint.path), + str(endpoint.fragment), + finding.title, + finding.description, + finding.severity, + ] + return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest() diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py new file mode 100644 index 00000000000..42b012d67f5 --- /dev/null +++ b/dojo/tools/openvas_v2/csv_parser.py @@ -0,0 +1,109 @@ +import csv +import io +import re + +from dateutil.parser import parse + +from dojo.models import Endpoint, Finding +from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description + + +def evaluate_bool_value(column_value): + value = column_value.lower() + if value == "true": + return True + if value == "false": + return False + return None + + +class OpenVASCSVParser: + def get_findings(self, filename, test): + dupes = {} + content = filename.read() + if isinstance(content, bytes): + content = content.decode("utf-8") + 
+ csv_reader = csv.reader(io.StringIO(content), delimiter=",", quotechar='"') + column_names = [column_name.lower() for column_name in next(csv_reader) if column_name] + + for row in csv_reader: + finding = Finding(test=test, dynamic_finding=True, static_finding=False, severity="Info") + finding.unsaved_vulnerability_ids = [] + finding.unsaved_endpoints = [Endpoint()] + aux_info = OpenVASFindingAuxData() + + for value, name in zip(row, column_names, strict=False): + self.process_column_element(value, name, finding, aux_info) + + update_description(finding, aux_info) + deduplicate(dupes, finding) + + return list(dupes.values()) + + def process_column_element( + self, + column_value: str, + column_name: str, + finding: Finding, + aux_info: OpenVASFindingAuxData, + ): + # skip columns with empty values + if not column_value: + return + + # process column names + if column_name == "nvt name": + finding.title = column_value + elif column_name == "cweid": + if column_value.isdigit(): + finding.cwe = int(column_value) + elif column_name == "cves": + for cve in column_value.split(","): + finding.unsaved_vulnerability_ids.append(cve) + elif column_name == "nvt oid": + cve_pattern = r"CVE-\d{4}-\d{4,7}" # legacy import + cves = re.findall(cve_pattern, column_value) + for cve in cves: + finding.unsaved_vulnerability_ids.append(cve) + if len(cves) == 0: + finding.script_id = column_value + elif column_name == "hostname": + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = column_value.strip() + elif column_name == "ip": + # fallback to ip if hostname is not aviable + if not finding.unsaved_endpoints[0].host: + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = column_value.strip() + elif column_name == "port": + if column_value.isdigit(): + finding.unsaved_endpoints[0].port = int(column_value) + elif column_name == "port protocol": + finding.unsaved_endpoints[0].protocol = 
column_value + elif column_name == "severity": + if is_valid_severity(column_value): + finding.severity = column_value + elif column_name == "cvss": + finding.cvssv3_score = float(column_value) + elif column_name == "summary": + finding.description = column_value + elif column_name == "solution": + finding.mitigation = column_value + elif column_name == "vulnerability insight": + finding.impact = column_value + elif column_name == "specific result": + finding.references = column_value + elif column_name == "qod": + aux_info.qod = column_value + # columns not part of default openvas csv export + elif column_name == "active": + finding.active = evaluate_bool_value(column_value) + elif column_name == "verified": + finding.verified = evaluate_bool_value(column_value) + elif column_name == "falsepositive": + finding.false_p = evaluate_bool_value(column_value) + elif column_name == "duplicate": + finding.duplicate = evaluate_bool_value(column_value) + elif column_name == "timestamp": + finding.date = parse(column_value).date() diff --git a/dojo/tools/openvas_v2/parser.py b/dojo/tools/openvas_v2/parser.py new file mode 100644 index 00000000000..9f366c17694 --- /dev/null +++ b/dojo/tools/openvas_v2/parser.py @@ -0,0 +1,20 @@ +from dojo.tools.openvas.csv_parser import OpenVASCSVParser +from dojo.tools.openvas.xml_parser import OpenVASXMLParser + + +class OpenVASParser: + def get_scan_types(self): + return ["OpenVAS Parser"] + + def get_label_for_scan_types(self, scan_type): + return scan_type # no custom label for now + + def get_description_for_scan_types(self, scan_type): + return "Import CSV or XML output of Greenbone OpenVAS report." 
+ + def get_findings(self, filename, test): + if str(filename.name).endswith(".csv"): + return OpenVASCSVParser().get_findings(filename, test) + if str(filename.name).endswith(".xml"): + return OpenVASXMLParser().get_findings(filename, test) + return None diff --git a/dojo/tools/openvas_v2/xml_parser.py b/dojo/tools/openvas_v2/xml_parser.py new file mode 100644 index 00000000000..0799b41bd66 --- /dev/null +++ b/dojo/tools/openvas_v2/xml_parser.py @@ -0,0 +1,102 @@ +import contextlib +from xml.dom import NamespaceErr + +from defusedxml import ElementTree + +from dojo.models import Endpoint, Finding +from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description + + +class OpenVASXMLParser: + def get_findings(self, filename, test): + dupes = {} + tree = ElementTree.parse(filename) + root = tree.getroot() + + if "report" not in root.tag: + msg = "This doesn't seem to be a valid Greenbone/ OpenVAS XML file." + raise NamespaceErr(msg) + + report = root.find("report") + results = report.find("results") + + for result in results: + finding = Finding( + test=test, + dynamic_finding=True, + static_finding=False, + severity="Info", + ) + aux_info = OpenVASFindingAuxData() + + finding.unsaved_vulnerability_ids = [] + finding.unsaved_endpoints = [Endpoint()] + + for field in result: + self.process_field_element(field, finding, aux_info) + + update_description(finding, aux_info) + deduplicate(dupes, finding) + + return list(dupes.values()) + + def parse_nvt_tags(self, text): + parts = text.strip().split("|") + tags = {} + + for part in parts: + idx = part.find("=") + if idx == -1 or (len(part) < idx + 2): + continue + + key = part[0:idx] + val = part[idx + 1:] + tags[key] = val + return tags + + def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindingAuxData): + if field.tag == "nvt": + finding.script_id = field.get("oid") + nvt_name = field.find("name").text + if nvt_name: + finding.title = nvt_name + + 
# parse tags field + tag_field = field.find("tags") + tags = self.parse_nvt_tags(tag_field.text) + summary = tags.get("summary", None) + if summary: + finding.description = summary + + impact = tags.get("impact", None) + if impact: + finding.impact = impact + elif field.tag == "qod": + aux_info.qod = field.find("value").text + + if not field.text: + return + + if field.tag == "name": + finding.title = field.text + elif field.tag == "hostname": + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = field.text.strip() + elif field.tag == "host": + if not finding.unsaved_endpoints[0].host: + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = field.text.strip() + elif field.tag == "port": + port_str, protocol = field.text.split("/") + with contextlib.suppress(ValueError): + finding.unsaved_endpoints[0].port = int(port_str) + finding.unsaved_endpoints[0].protocol = protocol + elif field.tag == "severity": + finding.cvssv3_score = float(field.text) + elif field.tag == "threat": + if is_valid_severity(field.text): + finding.severity = field.text + elif field.tag == "description": + finding.references = field.text.strip() + elif field.tag == "solution": + finding.mitigation = field.text From a62e54d20dc0bf45bf3864a25b4666d694097822 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:47:10 +0200 Subject: [PATCH 06/15] restored old v1 version --- dojo/tools/openvas/common.py | 69 ------ dojo/tools/openvas/csv_parser.py | 392 ++++++++++++++++++++++++------- dojo/tools/openvas/xml_parser.py | 138 +++++------ 3 files changed, 363 insertions(+), 236 deletions(-) delete mode 100644 dojo/tools/openvas/common.py diff --git a/dojo/tools/openvas/common.py b/dojo/tools/openvas/common.py deleted file mode 100644 index dd8d07b1ff5..00000000000 --- a/dojo/tools/openvas/common.py +++ /dev/null @@ -1,69 +0,0 @@ -import hashlib -import os 
-from dataclasses import dataclass - -from dojo.models import Finding - -OPENVAS_SEVERITY_OVERWRITE = os.environ.get("OPENVAS_SEVERITY_OVERWRITE", "False").lower() in {"true", 1} - - -@dataclass -class OpenVASFindingAuxData: - - """Dataclass to contain all information added later to description""" - - qod: str = "" - - -def is_valid_severity(severity): - valid_severity = ("Info", "Low", "Medium", "High", "Critical") - return severity in valid_severity - - -def update_description(finding: Finding, aux_info: OpenVASFindingAuxData): - if aux_info.qod: - finding.description += f"\n**QoD**: {aux_info.qod}" - - -def deduplicate(dupes: dict[str, Finding], finding: Finding): - """Combine multiple openvas findings into one defectdojo finding with multiple endpoints""" - finding_hash = generate_openvas_finding_hash(finding) - # set for use in global deduplication - finding.unique_id_from_tool = finding_hash - - if finding_hash not in dupes: - dupes[finding_hash] = finding - else: - # openvas does not combine multiple findings into one e.g - # a vunerability in the java runtime may be reported 2 times - # if 2 vulnerable java runtimes are present on the host - # and the only way do differantiate this findings are the specific results (mapped to references by the parser) - # but we cannot hash this field as it can contain data that changes between scans e.g timestamps - # we therfore combine them because duplicates during reimport cause - # https://github.com/DefectDojo/django-DefectDojo/issues/3958 - org = dupes[finding_hash] - if org.references != finding.references: - org.references += "\n---------------------------------------\n" - org.references += f"**Endpoint**: {finding.unsaved_endpoints[0].host}\n" - org.references += finding.references - - # combine identical findings on different hosts into one with multiple hosts - endpoint = finding.unsaved_endpoints[0] - if endpoint not in org.unsaved_endpoints: - org.unsaved_endpoints += finding.unsaved_endpoints - - -def 
generate_openvas_finding_hash(finding: Finding): - """Generate a hash for a finding that is used for deduplication of findings inside the current report""" - endpoint = finding.unsaved_endpoints[0] - hash_data = [ - str(endpoint.protocol), - str(endpoint.userinfo), - str(endpoint.port), - str(endpoint.path), - str(endpoint.fragment), - finding.title, - finding.description, - finding.severity, - ] - return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest() diff --git a/dojo/tools/openvas/csv_parser.py b/dojo/tools/openvas/csv_parser.py index 707e27bb97f..19fd92b33c4 100644 --- a/dojo/tools/openvas/csv_parser.py +++ b/dojo/tools/openvas/csv_parser.py @@ -1,108 +1,326 @@ import csv +import hashlib import io import re from dateutil.parser import parse from dojo.models import Endpoint, Finding -from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description -def evaluate_bool_value(column_value): - value = column_value.lower() - if value == "true": - return True - if value == "false": - return False - return None +class ColumnMappingStrategy: + mapped_column = None + + def __init__(self): + self.successor = None + + def map_column_value(self, finding, column_value): + pass + + @staticmethod + def evaluate_bool_value(column_value): + if column_value.lower() == "true": + return True + if column_value.lower() == "false": + return False + return None + + def process_column(self, column_name, column_value, finding): + if ( + column_name.lower() == self.mapped_column + and column_value is not None + ): + self.map_column_value(finding, column_value) + elif self.successor is not None: + self.successor.process_column(column_name, column_value, finding) + + +class DateColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "timestamp" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.date = parse(column_value).date() + + +class 
TitleColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "nvt name" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.title = column_value + + +class CweColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "cweid" + super().__init__() + + def map_column_value(self, finding, column_value): + if column_value.isdigit(): + finding.cwe = int(column_value) + + +class PortColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "port" + super().__init__() + + def map_column_value(self, finding, column_value): + if column_value.isdigit(): + finding.unsaved_endpoints[0].port = int(column_value) + + +class CveColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "cves" + super().__init__() + + def map_column_value(self, finding, column_value): + if column_value != "": + if "," in column_value: + finding.description += "\n**All CVEs:** " + str(column_value) + for value in column_value.split(","): + finding.unsaved_vulnerability_ids.append(value) + else: + finding.unsaved_vulnerability_ids.append(column_value) + + +class NVDCVEColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "nvt oid" + super().__init__() + + def map_column_value(self, finding, column_value): + cve_pattern = r"CVE-\d{4}-\d{4,7}" + cves = re.findall(cve_pattern, column_value) + for cve in cves: + finding.unsaved_vulnerability_ids.append(cve) + + +class ProtocolColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "port protocol" + super().__init__() + + def map_column_value(self, finding, column_value): + if column_value: # do not store empty protocol + finding.unsaved_endpoints[0].protocol = column_value + + +class IpColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "ip" + super().__init__() + + def 
map_column_value(self, finding, column_value): + if not finding.unsaved_endpoints[ + 0 + ].host and column_value is not None: # process only if host is not already defined (by field hostname) + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = column_value.strip() + + +class HostnameColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "hostname" + super().__init__() + + def map_column_value(self, finding, column_value): + if column_value: # do not override IP if hostname is empty + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = column_value.strip() + + +class SeverityColumnMappingStrategy(ColumnMappingStrategy): + @staticmethod + def is_valid_severity(severity): + valid_severity = ("Info", "Low", "Medium", "High", "Critical") + return severity in valid_severity + + def __init__(self): + self.mapped_column = "severity" + super().__init__() + + def map_column_value(self, finding, column_value): + if self.is_valid_severity(column_value): + finding.severity = column_value + else: + finding.severity = "Info" + + +class CvssColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "cvss" + super().__init__() + + def map_column_value(self, finding, column_value): + # skip empty values + if not column_value: + return + finding.cvssv3_score = float(column_value) + + +class DescriptionColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "summary" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.description = column_value + + +class MitigationColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "solution" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.mitigation = column_value + + +class ImpactColumnMappingStrategy(ColumnMappingStrategy): + def 
__init__(self): + self.mapped_column = "vulnerability insight" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.impact = column_value + + +class ReferencesColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "specific result" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.references = column_value + + +class ActiveColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "active" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.active = self.evaluate_bool_value(column_value) + + +class VerifiedColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "verified" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.verified = self.evaluate_bool_value(column_value) + + +class FalsePositiveColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "falsepositive" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.false_p = self.evaluate_bool_value(column_value) + + +class DuplicateColumnMappingStrategy(ColumnMappingStrategy): + def __init__(self): + self.mapped_column = "duplicate" + super().__init__() + + def map_column_value(self, finding, column_value): + finding.duplicate = self.evaluate_bool_value(column_value) class OpenVASCSVParser: + def create_chain(self): + date_column_strategy = DateColumnMappingStrategy() + title_column_strategy = TitleColumnMappingStrategy() + cwe_column_strategy = CweColumnMappingStrategy() + ip_column_strategy = IpColumnMappingStrategy() + hostname_column_strategy = HostnameColumnMappingStrategy() + severity_column_strategy = SeverityColumnMappingStrategy() + cvss_score_column_strategy = CvssColumnMappingStrategy() + description_column_strategy = DescriptionColumnMappingStrategy() + mitigation_column_strategy = 
MitigationColumnMappingStrategy() + impact_column_strategy = ImpactColumnMappingStrategy() + references_column_strategy = ReferencesColumnMappingStrategy() + active_column_strategy = ActiveColumnMappingStrategy() + verified_column_strategy = VerifiedColumnMappingStrategy() + false_positive_strategy = FalsePositiveColumnMappingStrategy() + duplicate_strategy = DuplicateColumnMappingStrategy() + port_strategy = PortColumnMappingStrategy() + protocol_strategy = ProtocolColumnMappingStrategy() + cve_column_strategy = CveColumnMappingStrategy() + nvd_cve_column_strategy = NVDCVEColumnMappingStrategy() + port_strategy.successor = protocol_strategy + duplicate_strategy.successor = port_strategy + false_positive_strategy.successor = duplicate_strategy + verified_column_strategy.successor = false_positive_strategy + active_column_strategy.successor = verified_column_strategy + references_column_strategy.successor = active_column_strategy + impact_column_strategy.successor = references_column_strategy + mitigation_column_strategy.successor = impact_column_strategy + description_column_strategy.successor = mitigation_column_strategy + cvss_score_column_strategy.successor = description_column_strategy + severity_column_strategy.successor = cvss_score_column_strategy + ip_column_strategy.successor = severity_column_strategy + hostname_column_strategy.successor = ip_column_strategy + cwe_column_strategy.successor = hostname_column_strategy + title_column_strategy.successor = cwe_column_strategy + cve_column_strategy.successor = title_column_strategy + nvd_cve_column_strategy.successor = cve_column_strategy + date_column_strategy.successor = nvd_cve_column_strategy + return date_column_strategy + + def read_column_names(self, row): + return dict(enumerate(row)) + def get_findings(self, filename, test): + column_names = {} dupes = {} + chain = self.create_chain() content = filename.read() if isinstance(content, bytes): content = content.decode("utf-8") - - csv_reader = 
csv.reader(io.StringIO(content), delimiter=",", quotechar='"') - column_names = [column_name.lower() for column_name in next(csv_reader) if column_name] - - for row in csv_reader: - finding = Finding(test=test, dynamic_finding=True, static_finding=False, severity="Info") + reader = csv.reader(io.StringIO(content), delimiter=",", quotechar='"') + for row_number, row in enumerate(reader): + finding = Finding(test=test) finding.unsaved_vulnerability_ids = [] finding.unsaved_endpoints = [Endpoint()] - aux_info = OpenVASFindingAuxData() - - for value, name in zip(row, column_names, strict=False): - self.process_column_element(value, name, finding, aux_info) + ip = None + if row_number == 0: + column_names = self.read_column_names(row) + continue + for column_number, column in enumerate(row): + chain.process_column( + column_names[column_number], column, finding, + ) + # due to the way this parser is implemented we have to do this stuff to retrieve a value for later use + if column_names[column_number].lower() == "ip": + ip = column - update_description(finding, aux_info) - deduplicate(dupes, finding) + if ip: + finding.description += f"\n**IP**: {ip}" + if finding is not None and row_number > 0: + if finding.title is None: + finding.title = "" + if finding.description is None: + finding.description = "" + key = hashlib.sha256( + ( + str(finding.unsaved_endpoints[0]) + + "|" + + finding.severity + + "|" + + finding.title + + "|" + + finding.description + ).encode("utf-8"), + ).hexdigest() + if key not in dupes: + dupes[key] = finding return list(dupes.values()) - - def process_column_element( - self, - column_value: str, - column_name: str, - finding: Finding, - aux_info: OpenVASFindingAuxData, - ): - # skip columns with empty values - if not column_value: - return - - # process column names - if column_name == "nvt name": - finding.title = column_value - elif column_name == "cweid": - if column_value.isdigit(): - finding.cwe = int(column_value) - elif column_name == 
"cves": - for cve in column_value.split(","): - finding.unsaved_vulnerability_ids.append(cve) - elif column_name == "nvt oid": - cve_pattern = r"CVE-\d{4}-\d{4,7}" # legacy import - cves = re.findall(cve_pattern, column_value) - for cve in cves: - finding.unsaved_vulnerability_ids.append(cve) - if len(cves) == 0: - finding.script_id = column_value - elif column_name == "hostname": - # strip due to https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = column_value.strip() - elif column_name == "ip": - # fallback to ip if hostname is not aviable - if not finding.unsaved_endpoints[0].host: - # strip due to https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = column_value.strip() - elif column_name == "port": - if column_value.isdigit(): - finding.unsaved_endpoints[0].port = int(column_value) - elif column_name == "port protocol": - finding.unsaved_endpoints[0].protocol = column_value - elif column_name == "severity": - if is_valid_severity(column_value): - finding.severity = column_value - elif column_name == "cvss": - finding.cvssv3_score = float(column_value) - elif column_name == "summary": - finding.description = column_value - elif column_name == "solution": - finding.mitigation = column_value - elif column_name == "vulnerability insight": - finding.impact = column_value - elif column_name == "specific result": - finding.references = column_value - elif column_name == "qod": - aux_info.qod = column_value - # columns not part of default openvas csv export - elif column_name == "active": - finding.active = evaluate_bool_value(column_value) - elif column_name == "verified": - finding.verified = evaluate_bool_value(column_value) - elif column_name == "falsepositive": - finding.false_p = evaluate_bool_value(column_value) - elif column_name == "duplicate": - finding.duplicate = evaluate_bool_value(column_value) - elif column_name == "timestamp": - finding.date = parse(column_value).date() diff --git 
a/dojo/tools/openvas/xml_parser.py b/dojo/tools/openvas/xml_parser.py index 0799b41bd66..ed8b3c25185 100644 --- a/dojo/tools/openvas/xml_parser.py +++ b/dojo/tools/openvas/xml_parser.py @@ -4,99 +4,77 @@ from defusedxml import ElementTree from dojo.models import Endpoint, Finding -from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description class OpenVASXMLParser: def get_findings(self, filename, test): - dupes = {} + findings = [] tree = ElementTree.parse(filename) root = tree.getroot() - if "report" not in root.tag: - msg = "This doesn't seem to be a valid Greenbone/ OpenVAS XML file." + msg = "This doesn't seem to be a valid Greenbone OpenVAS XML file." raise NamespaceErr(msg) - report = root.find("report") results = report.find("results") - for result in results: + script_id = None + unsaved_endpoint = Endpoint() + for field in result: + if field.tag == "name": + title = field.text + description = [f"**Name**: {field.text}"] + if field.tag == "hostname": + title = title + "_" + field.text + description.append(f"**Hostname**: {field.text}") + if field.text: + unsaved_endpoint.host = field.text.strip() # strip due to https://github.com/greenbone/gvmd/issues/2378 + if field.tag == "host": + title = title + "_" + field.text + description.append(f"**Host**: {field.text}") + if not unsaved_endpoint.host and field.text: + unsaved_endpoint.host = field.text.strip() # strip due to https://github.com/greenbone/gvmd/issues/2378 + if field.tag == "port": + title = title + "_" + field.text + description.append(f"**Port**: {field.text}") + if field.text: + port_str, protocol = field.text.split("/") + with contextlib.suppress(ValueError): + unsaved_endpoint.port = int(port_str) + unsaved_endpoint.protocol = protocol + if field.tag == "nvt": + description.append(f"**NVT**: {field.text}") + script_id = field.get("oid") or field.text + if field.tag == "severity": + description.append(f"**Severity**: {field.text}") + if field.tag 
== "threat": + description.append(f"**Threat**: {field.text}") + severity = field.text if field.text in {"Info", "Low", "Medium", "High", "Critical"} else "Info" + if field.tag == "qod": + description.append(f"**QOD**: {field.text}") + if field.tag == "description": + description.append(f"**Description**: {field.text}") + finding = Finding( + title=str(title), test=test, + description="\n".join(description), + severity=severity, dynamic_finding=True, static_finding=False, - severity="Info", + vuln_id_from_tool=script_id, ) - aux_info = OpenVASFindingAuxData() - - finding.unsaved_vulnerability_ids = [] - finding.unsaved_endpoints = [Endpoint()] - - for field in result: - self.process_field_element(field, finding, aux_info) - - update_description(finding, aux_info) - deduplicate(dupes, finding) - - return list(dupes.values()) - - def parse_nvt_tags(self, text): - parts = text.strip().split("|") - tags = {} - - for part in parts: - idx = part.find("=") - if idx == -1 or (len(part) < idx + 2): - continue - - key = part[0:idx] - val = part[idx + 1:] - tags[key] = val - return tags - - def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindingAuxData): - if field.tag == "nvt": - finding.script_id = field.get("oid") - nvt_name = field.find("name").text - if nvt_name: - finding.title = nvt_name - - # parse tags field - tag_field = field.find("tags") - tags = self.parse_nvt_tags(tag_field.text) - summary = tags.get("summary", None) - if summary: - finding.description = summary - - impact = tags.get("impact", None) - if impact: - finding.impact = impact - elif field.tag == "qod": - aux_info.qod = field.find("value").text - - if not field.text: - return - - if field.tag == "name": - finding.title = field.text - elif field.tag == "hostname": - # strip due to https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = field.text.strip() - elif field.tag == "host": - if not finding.unsaved_endpoints[0].host: - # strip due to 
https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = field.text.strip() - elif field.tag == "port": - port_str, protocol = field.text.split("/") - with contextlib.suppress(ValueError): - finding.unsaved_endpoints[0].port = int(port_str) - finding.unsaved_endpoints[0].protocol = protocol - elif field.tag == "severity": - finding.cvssv3_score = float(field.text) - elif field.tag == "threat": - if is_valid_severity(field.text): - finding.severity = field.text - elif field.tag == "description": - finding.references = field.text.strip() - elif field.tag == "solution": - finding.mitigation = field.text + finding.unsaved_endpoints = [unsaved_endpoint] + findings.append(finding) + return findings + + def convert_cvss_score(self, raw_value): + val = float(raw_value) + if val == 0.0: + return "Info" + if val < 4.0: + return "Low" + if val < 7.0: + return "Medium" + if val < 9.0: + return "High" + return "Critical" From e87b478365332b6e421e9c752aabc25c6ddf9e62 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:50:16 +0200 Subject: [PATCH 07/15] renamed parser classes --- dojo/tools/openvas_v2/csv_parser.py | 2 +- dojo/tools/openvas_v2/parser.py | 8 ++++---- dojo/tools/openvas_v2/xml_parser.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py index 42b012d67f5..96aee7f03d6 100644 --- a/dojo/tools/openvas_v2/csv_parser.py +++ b/dojo/tools/openvas_v2/csv_parser.py @@ -17,7 +17,7 @@ def evaluate_bool_value(column_value): return None -class OpenVASCSVParser: +class OpenVASCSVParserV2: def get_findings(self, filename, test): dupes = {} content = filename.read() diff --git a/dojo/tools/openvas_v2/parser.py b/dojo/tools/openvas_v2/parser.py index 9f366c17694..52f92ab2302 100644 --- a/dojo/tools/openvas_v2/parser.py +++ b/dojo/tools/openvas_v2/parser.py @@ -1,10 +1,10 @@ -from dojo.tools.openvas.csv_parser 
import OpenVASCSVParser -from dojo.tools.openvas.xml_parser import OpenVASXMLParser +from dojo.tools.openvas_v2.csv_parser import OpenVASCSVParserV2 +from dojo.tools.openvas_v2.xml_parser import OpenVASXMLParserV2 -class OpenVASParser: +class OpenVASParserV2: def get_scan_types(self): - return ["OpenVAS Parser"] + return ["OpenVAS Parser V2"] def get_label_for_scan_types(self, scan_type): return scan_type # no custom label for now diff --git a/dojo/tools/openvas_v2/xml_parser.py b/dojo/tools/openvas_v2/xml_parser.py index 0799b41bd66..fff9c62f9f6 100644 --- a/dojo/tools/openvas_v2/xml_parser.py +++ b/dojo/tools/openvas_v2/xml_parser.py @@ -7,7 +7,7 @@ from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description -class OpenVASXMLParser: +class OpenVASXMLParserV2: def get_findings(self, filename, test): dupes = {} tree = ElementTree.parse(filename) From cb8fb492ed5c335f1b91342f861f7691ac45bd03 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:55:39 +0200 Subject: [PATCH 08/15] seperated tests for openvas v2 --- unittests/tools/test_openvas_parser.py | 14 +-- unittests/tools/test_openvas_parser_v2.py | 135 ++++++++++++++++++++++ 2 files changed, 136 insertions(+), 13 deletions(-) create mode 100644 unittests/tools/test_openvas_parser_v2.py diff --git a/unittests/tools/test_openvas_parser.py b/unittests/tools/test_openvas_parser.py index 4d383d544a7..7ec8cf7ebf2 100644 --- a/unittests/tools/test_openvas_parser.py +++ b/unittests/tools/test_openvas_parser.py @@ -83,18 +83,6 @@ def test_openvas_csv_report_usingOpenVAS(self): self.assertEqual("Info", finding.severity) self.assertEqual(finding.unsaved_vulnerability_ids, []) - def test_openvas_csv_report_combined_findings(self): - with (get_unit_tests_scans_path("openvas") / "report_using_openVAS_findings_to_combine.csv").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - 
test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual(1, len(findings)) - def test_openvas_xml_no_vuln(self): with (get_unit_tests_scans_path("openvas") / "no_vuln.xml").open(encoding="utf-8") as f: test = Test() @@ -117,7 +105,7 @@ def test_openvas_xml_one_vuln(self): self.assertEqual(1, len(findings)) with self.subTest(i=0): finding = findings[0] - self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows", finding.title) + self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows_10.0.101.2_general/tcp", finding.title) self.assertEqual("High", finding.severity) def test_openvas_xml_many_vuln(self): diff --git a/unittests/tools/test_openvas_parser_v2.py b/unittests/tools/test_openvas_parser_v2.py new file mode 100644 index 00000000000..4d383d544a7 --- /dev/null +++ b/unittests/tools/test_openvas_parser_v2.py @@ -0,0 +1,135 @@ +from dojo.models import Engagement, Product, Test +from dojo.tools.openvas.parser import OpenVASParser +from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path + + +class TestOpenVASParser(DojoTestCase): + def test_openvas_csv_one_vuln(self): + with (get_unit_tests_scans_path("openvas") / "one_vuln.csv").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(1, len(findings)) + # finding + self.assertEqual("SSH Weak Encryption Algorithms Supported", findings[0].title) + self.assertEqual("Medium", findings[0].severity) + # endpoints + self.assertEqual(1, len(findings[0].unsaved_endpoints)) + # endpoint + self.assertEqual("10.0.0.8", findings[0].unsaved_endpoints[0].host) + 
self.assertEqual("tcp", findings[0].unsaved_endpoints[0].protocol) + self.assertEqual(22, findings[0].unsaved_endpoints[0].port) + + def test_openvas_csv_many_vuln(self): + with (get_unit_tests_scans_path("openvas") / "many_vuln.csv").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(4, len(findings)) + # finding + finding = findings[3] + self.assertEqual("HTTP Brute Force Logins With Default Credentials Reporting", finding.title) + self.assertEqual("High", finding.severity) + # endpoints + self.assertEqual(1, len(finding.unsaved_endpoints)) + # endpoint + endpoint = finding.unsaved_endpoints[0] + self.assertEqual("LOGSRV", endpoint.host) + self.assertEqual("tcp", endpoint.protocol) + self.assertEqual(9200, endpoint.port) + finding = findings[2] + self.assertEqual(finding.unsaved_vulnerability_ids[0], "CVE-2011-3389") + + def test_openvas_csv_report_usingCVE(self): + with (get_unit_tests_scans_path("openvas") / "report_using_CVE.csv").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(43, len(findings)) + finding = findings[4] + self.assertEqual("CVE-2014-0117", finding.title) + self.assertEqual("Medium", finding.severity) + self.assertEqual(4.3, finding.cvssv3_score) + self.assertEqual(finding.unsaved_vulnerability_ids[0], "CVE-2014-0117") + + def test_openvas_csv_report_usingOpenVAS(self): + with (get_unit_tests_scans_path("openvas") / "report_using_openVAS.csv").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + 
parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(13, len(findings)) + finding = findings[2] + self.assertEqual("Apache HTTP Server Detection Consolidation", finding.title) + self.assertEqual("Info", finding.severity) + self.assertEqual(finding.unsaved_vulnerability_ids, []) + + def test_openvas_csv_report_combined_findings(self): + with (get_unit_tests_scans_path("openvas") / "report_using_openVAS_findings_to_combine.csv").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(1, len(findings)) + + def test_openvas_xml_no_vuln(self): + with (get_unit_tests_scans_path("openvas") / "no_vuln.xml").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + self.assertEqual(0, len(findings)) + + def test_openvas_xml_one_vuln(self): + with (get_unit_tests_scans_path("openvas") / "one_vuln.xml").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(1, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows", finding.title) + self.assertEqual("High", finding.severity) + + def test_openvas_xml_many_vuln(self): + with (get_unit_tests_scans_path("openvas") / "many_vuln.xml").open(encoding="utf-8") as f: + test = Test() + test.engagement = Engagement() + 
test.engagement.product = Product() + parser = OpenVASParser() + findings = parser.get_findings(f, test) + self.assertEqual(44, len(findings)) + self.assertEqual(44, len([endpoint for finding in findings for endpoint in finding.unsaved_endpoints])) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual("tcp://192.168.1.1001:512", str(findings[0].unsaved_endpoints[0])) From c9d7a427bcb7c8d927cddb72a823f27afca0822e Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Sat, 30 Aug 2025 15:13:22 +0200 Subject: [PATCH 09/15] added testing for v2 and bugfixes --- .../parsers/file/openvas_v2.md | 16 + dojo/tools/openvas_v2/common.py | 62 +- dojo/tools/openvas_v2/csv_parser.py | 17 +- dojo/tools/openvas_v2/parser.py | 4 +- dojo/tools/openvas_v2/xml_parser.py | 29 +- unittests/scans/openvas/many_vuln.xml | 178 ++--- unittests/scans/openvas/no_vuln.csv | 1 + ...s_to_combine.csv => report_combine_v2.csv} | 0 unittests/scans/openvas/report_detail_v2.csv | 20 + unittests/scans/openvas/report_detail_v2.xml | 217 +++++++ unittests/scans/openvas/report_many_v2.csv | 614 ++++++++++++++++++ unittests/tools/test_openvas_parser_v2.py | 223 +++---- 12 files changed, 1128 insertions(+), 253 deletions(-) create mode 100644 docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md create mode 100644 unittests/scans/openvas/no_vuln.csv rename unittests/scans/openvas/{report_using_openVAS_findings_to_combine.csv => report_combine_v2.csv} (100%) create mode 100644 unittests/scans/openvas/report_detail_v2.csv create mode 100644 unittests/scans/openvas/report_detail_v2.xml create mode 100644 unittests/scans/openvas/report_many_v2.csv diff --git a/docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md b/docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md new file mode 100644 index 00000000000..a39bd735fc1 --- /dev/null +++ 
b/docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md
@@ -0,0 +1,16 @@
+---
+title: "OpenVAS Parser V2"
+toc_hide: true
+---
+This is version 2 of the OpenVAS / Greenbone parser.
+You can upload your scans in either CSV or XML format. For the parser to recognize the format, the file name has to end with .csv or .xml.
+
+### V2 Changes
+Version 2 comes with multiple improvements:
+- Uses unique_id_from_tool for deduplication
+- Increased parsing consistency between the XML and CSV parsers
+- Combined findings where the only differences are in fields that can’t be rehashed due to inconsistent values between scans, e.g. fields with timestamps or packet ids.
+- Parser now combines multiple identical findings with different endpoints into one finding with multiple endpoints (instead of multiple findings with one endpoint each)
+
+### Sample Scan Data
+Sample OpenVAS scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/openvas).
diff --git a/dojo/tools/openvas_v2/common.py b/dojo/tools/openvas_v2/common.py
index dd8d07b1ff5..7d42a1facd3 100644
--- a/dojo/tools/openvas_v2/common.py
+++ b/dojo/tools/openvas_v2/common.py
@@ -9,10 +9,11 @@
 @dataclass
 class OpenVASFindingAuxData:
+    """Dataclass to contain all information added later to fields"""
 
-    """Dataclass to contain all information added later to description"""
-
+    summary: str = ""
     qod: str = ""
+    openvas_result: str = ""
 
 
 def is_valid_severity(severity):
@@ -20,32 +21,43 @@ def is_valid_severity(severity):
     return severity in valid_severity
 
 
-def update_description(finding: Finding, aux_info: OpenVASFindingAuxData):
+def update_finding(finding: Finding, aux_info: OpenVASFindingAuxData):
+    """Update the finding's fields with the collected auxiliary data"""
+    if aux_info.openvas_result:
+        finding.steps_to_reproduce = aux_info.openvas_result
+    if aux_info.summary:
+        finding.description += f"\n**Summary**: {aux_info.summary}"
     if aux_info.qod:
         finding.description += f"\n**QoD**: {aux_info.qod}"
 
 
 def deduplicate(dupes: dict[str, Finding], finding: Finding):
     """Combine multiple openvas findings into one defectdojo finding with multiple endpoints"""
-    finding_hash = generate_openvas_finding_hash(finding)
-    # set for use in global deduplication
-    finding.unique_id_from_tool = finding_hash
+    finding_hash = dedup_finding_hash(finding)
+    # deliberately misuse unique_id_from_tool to save some original values
+    finding.unique_id_from_tool = id_from_tool_finding_hash(finding)
     if finding_hash not in dupes:
         dupes[finding_hash] = finding
     else:
-        # openvas does not combine multiple findings into one e.g
-        # a vunerability in the java runtime may be reported 2 times
-        # if 2 vulnerable java runtimes are present on the host
-        # and the only way do differantiate this findings are the specific results (mapped to references by the parser)
-        # but we cannot hash this field as it can contain data that changes between scans e.g timestamps
-        # we therfore combine them because duplicates during reimport 
cause
+        # OpenVAS does not combine multiple findings into one
+        # e.g. if 2 vulnerable java runtimes are present on the host this is reported as 2 findings.
+        # The only way to differentiate these findings when they are based on the same vulnerability
+        # is the data mapped to steps to reproduce.
+        # However we cannot hash this field as it can contain data that changes between scans
+        # e.g. timestamps or packet ids
+        # we therefore combine them into one DefectDojo finding because duplicates during reimport cause
         # https://github.com/DefectDojo/django-DefectDojo/issues/3958
         org = dupes[finding_hash]
-        if org.references != finding.references:
-            org.references += "\n---------------------------------------\n"
-            org.references += f"**Endpoint**: {finding.unsaved_endpoints[0].host}\n"
-            org.references += finding.references
+        if org.steps_to_reproduce != finding.steps_to_reproduce:
+            if "Endpoint" in org.steps_to_reproduce:
+                org.steps_to_reproduce += "\n---------------------------------------\n"
+            org.steps_to_reproduce += f"**Endpoint**: {finding.unsaved_endpoints[0].host}\n"
+            org.steps_to_reproduce += finding.steps_to_reproduce
+        else:
+            tmp = org.steps_to_reproduce
+            org.steps_to_reproduce = f"**Endpoint**: {org.unsaved_endpoints[0].host}\n"
+            org.steps_to_reproduce += tmp
 
     # combine identical findings on different hosts into one with multiple hosts
     endpoint = finding.unsaved_endpoints[0]
@@ -53,7 +65,21 @@
     org.unsaved_endpoints += finding.unsaved_endpoints
 
 
-def generate_openvas_finding_hash(finding: Finding):
+def id_from_tool_finding_hash(finding: Finding):
+    """Generate a hash that complements the final hash generated outside of this parser"""
+    endpoint = finding.unsaved_endpoints[0]
+    hash_data = [
+        str(endpoint.protocol),
+        str(endpoint.userinfo),
+        str(endpoint.port),  # keep findings on different ports separate as they may be different applications
+        str(endpoint.path),
+        str(endpoint.fragment),
+        
finding.severity,  # allows changing severity of finding after import
+    ]
+    return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest()
+
+
+def dedup_finding_hash(finding: Finding):
     """Generate a hash for a finding that is used for deduplication of findings inside the current report"""
     endpoint = finding.unsaved_endpoints[0]
     hash_data = [
@@ -63,7 +89,7 @@
         str(endpoint.path),
         str(endpoint.fragment),
         finding.title,
-        finding.description,
+        finding.vuln_id_from_tool,
         finding.severity,
     ]
     return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest()
diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py
index 96aee7f03d6..3db22fee454 100644
--- a/dojo/tools/openvas_v2/csv_parser.py
+++ b/dojo/tools/openvas_v2/csv_parser.py
@@ -1,11 +1,10 @@
 import csv
 import io
-import re
 
 from dateutil.parser import parse
 
 from dojo.models import Endpoint, Finding
-from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description
+from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding
 
 
 def evaluate_bool_value(column_value):
@@ -27,6 +26,9 @@ def get_findings(self, filename, test):
         csv_reader = csv.reader(io.StringIO(content), delimiter=",", quotechar='"')
         column_names = [column_name.lower() for column_name in next(csv_reader) if column_name]
+        if "nvt name" not in column_names:
+            raise ValueError("This doesn't seem to be a valid Greenbone/OpenVAS CSV file.")
+ for row in csv_reader: finding = Finding(test=test, dynamic_finding=True, static_finding=False, severity="Info") finding.unsaved_vulnerability_ids = [] @@ -36,7 +38,7 @@ def get_findings(self, filename, test): for value, name in zip(row, column_names, strict=False): self.process_column_element(value, name, finding, aux_info) - update_description(finding, aux_info) + update_finding(finding, aux_info) deduplicate(dupes, finding) return list(dupes.values()) @@ -62,12 +64,7 @@ def process_column_element( for cve in column_value.split(","): finding.unsaved_vulnerability_ids.append(cve) elif column_name == "nvt oid": - cve_pattern = r"CVE-\d{4}-\d{4,7}" # legacy import - cves = re.findall(cve_pattern, column_value) - for cve in cves: - finding.unsaved_vulnerability_ids.append(cve) - if len(cves) == 0: - finding.script_id = column_value + finding.vuln_id_from_tool = column_value elif column_name == "hostname": # strip due to https://github.com/greenbone/gvmd/issues/2378 finding.unsaved_endpoints[0].host = column_value.strip() @@ -93,7 +90,7 @@ def process_column_element( elif column_name == "vulnerability insight": finding.impact = column_value elif column_name == "specific result": - finding.references = column_value + aux_info.openvas_result = column_value elif column_name == "qod": aux_info.qod = column_value # columns not part of default openvas csv export diff --git a/dojo/tools/openvas_v2/parser.py b/dojo/tools/openvas_v2/parser.py index 52f92ab2302..13cab942373 100644 --- a/dojo/tools/openvas_v2/parser.py +++ b/dojo/tools/openvas_v2/parser.py @@ -14,7 +14,7 @@ def get_description_for_scan_types(self, scan_type): def get_findings(self, filename, test): if str(filename.name).endswith(".csv"): - return OpenVASCSVParser().get_findings(filename, test) + return OpenVASCSVParserV2().get_findings(filename, test) if str(filename.name).endswith(".xml"): - return OpenVASXMLParser().get_findings(filename, test) + return OpenVASXMLParserV2().get_findings(filename, test) 
return None diff --git a/dojo/tools/openvas_v2/xml_parser.py b/dojo/tools/openvas_v2/xml_parser.py index fff9c62f9f6..51a75f8fe2c 100644 --- a/dojo/tools/openvas_v2/xml_parser.py +++ b/dojo/tools/openvas_v2/xml_parser.py @@ -4,7 +4,7 @@ from defusedxml import ElementTree from dojo.models import Endpoint, Finding -from dojo.tools.openvas.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_description +from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding class OpenVASXMLParserV2: @@ -35,7 +35,7 @@ def get_findings(self, filename, test): for field in result: self.process_field_element(field, finding, aux_info) - update_description(finding, aux_info) + update_finding(finding, aux_info) deduplicate(dupes, finding) return list(dupes.values()) @@ -50,13 +50,14 @@ def parse_nvt_tags(self, text): continue key = part[0:idx] - val = part[idx + 1:] + val = part[idx + 1 :] tags[key] = val return tags def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindingAuxData): if field.tag == "nvt": - finding.script_id = field.get("oid") + # parse general field + finding.vuln_id_from_tool = field.get("oid") nvt_name = field.find("name").text if nvt_name: finding.title = nvt_name @@ -71,6 +72,12 @@ def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindin impact = tags.get("impact", None) if impact: finding.impact = impact + + # parse cves + refs_node = field.find("refs") + if refs_node is not None: + refs = refs_node.findall(".//ref[@type='cve']") + finding.unsaved_vulnerability_ids = [ref.get("id") for ref in refs] elif field.tag == "qod": aux_info.qod = field.find("value").text @@ -79,24 +86,26 @@ def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindin if field.tag == "name": finding.title = field.text - elif field.tag == "hostname": - # strip due to https://github.com/greenbone/gvmd/issues/2378 - finding.unsaved_endpoints[0].host = 
field.text.strip() elif field.tag == "host": - if not finding.unsaved_endpoints[0].host: + hostname_field = field.find("hostname") + # default to hostname else ip + if hostname_field is not None and hostname_field.text: + # strip due to https://github.com/greenbone/gvmd/issues/2378 + finding.unsaved_endpoints[0].host = hostname_field.text.strip() + else: # strip due to https://github.com/greenbone/gvmd/issues/2378 finding.unsaved_endpoints[0].host = field.text.strip() elif field.tag == "port": port_str, protocol = field.text.split("/") + finding.unsaved_endpoints[0].protocol = protocol with contextlib.suppress(ValueError): finding.unsaved_endpoints[0].port = int(port_str) - finding.unsaved_endpoints[0].protocol = protocol elif field.tag == "severity": finding.cvssv3_score = float(field.text) elif field.tag == "threat": if is_valid_severity(field.text): finding.severity = field.text elif field.tag == "description": - finding.references = field.text.strip() + aux_info.openvas_result = field.text.strip() elif field.tag == "solution": finding.mitigation = field.text diff --git a/unittests/scans/openvas/many_vuln.xml b/unittests/scans/openvas/many_vuln.xml index d3f975d3ef7..baec0128837 100644 --- a/unittests/scans/openvas/many_vuln.xml +++ b/unittests/scans/openvas/many_vuln.xml @@ -149,13 +149,13 @@ - {v1}467e39e554a + 467e39e554a gps 2023-09-29T11:36:37.717168Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 512/tcp nvt @@ -189,13 +189,13 @@ 5 - {v1}530765cf437 + 530765cf437 gps 2023-09-29T11:36:37.717208Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 1524/tcp nvt @@ -224,13 +224,13 @@ 5 - {v1}5f5c7518c92 + 5f5c7518c92 gps 2023-09-29T11:36:37.717216Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 8787/tcp nvt @@ -304,13 +304,13 @@ 5 - {v1}8c49cb44d75 + 8c49cb44d75 gps 2023-09-29T11:36:37.717246Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 general/tcp nvt @@ -349,13 +349,13 @@ 5 - {v1}22a938294ad + 22a938294ad gps 
2023-09-29T11:36:37.717262Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -402,13 +402,13 @@ 5 - {v1}9e2edd735b3 + 9e2edd735b3 gps 2023-09-29T11:36:37.717281Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 3632/tcp nvt @@ -453,13 +453,13 @@ 5 - {v1}0b02451a968 + 0b02451a968 gps 2023-09-29T11:36:37.717494Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5900/tcp nvt @@ -498,13 +498,13 @@ 5 - {v1}e93a2434477 + e93a2434477 gps 2023-09-29T11:36:37.717503Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -531,13 +531,13 @@ 5 - {v1}3723bfe0094 + 3723bfe0094 gps 2023-09-29T11:36:37.717511Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 6667/tcp nvt @@ -580,13 +580,13 @@ 5 - {v1}3723bfe0094 + 3723bfe0094 gps 2023-09-29T11:36:37.717520Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 6697/tcp nvt @@ -629,13 +629,13 @@ 5 - {v1}a358693375b + a358693375b gps 2023-09-29T11:36:37.717529Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 21/tcp nvt @@ -690,13 +690,13 @@ 5 - {v1}4ecebea5997 + 4ecebea5997 gps 2023-09-29T11:36:37.717538Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -740,13 +740,13 @@ 5 - {v1}dcc8491b116 + dcc8491b116 gps 2023-09-29T11:36:37.717558Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 21/tcp nvt @@ -781,13 +781,13 @@ 5 - {v1}a358693375b + a358693375b gps 2023-09-29T11:36:37.717575Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 2121/tcp nvt @@ -839,13 +839,13 @@ 5 - {v1}edca4d29119 + edca4d29119 gps 2023-09-29T11:36:37.717584Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -883,13 +883,13 @@ 5 - {v1}28996b2da9a + 28996b2da9a gps 2023-09-29T11:36:37.717594Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -956,13 +956,13 @@ 5 - {v1}f209b933bd1 + f209b933bd1 gps 2023-09-29T11:36:37.717604Z - 192.168.1.1001{v1}b6b9f466d63 + 
192.168.1.1001b6b9f466d63 22/tcp nvt @@ -1029,13 +1029,13 @@ 5 - {v1}dcc8491b116 + dcc8491b116 gps 2023-09-29T11:36:37.717613Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 6200/tcp nvt @@ -1070,13 +1070,13 @@ 5 - {v1}d803f61f444 + d803f61f444 gps 2023-09-29T11:36:37.717621Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -1115,13 +1115,13 @@ 5 - {v1}e70046de17f + e70046de17f gps 2023-09-29T11:36:37.717637Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1164,13 +1164,13 @@ 5 - {v1}944cfcaaf66 + 944cfcaaf66 gps 2023-09-29T11:36:37.717645Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 21/tcp nvt @@ -1222,13 +1222,13 @@ 5 - {v1}cc1c4db6d4f + cc1c4db6d4f gps 2023-09-29T11:36:37.717654Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1276,13 +1276,13 @@ 5 - {v1}44d224b77c4 + 44d224b77c4 gps 2023-09-29T11:36:37.717662Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1314,13 +1314,13 @@ 5 - {v1}e70046de17f + e70046de17f gps 2023-09-29T11:36:37.717670Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1361,13 +1361,13 @@ 5 - {v1}71c655fd352 + 71c655fd352 gps 2023-09-29T11:36:37.717677Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 445/tcp nvt @@ -1400,13 +1400,13 @@ 5 - {v1}e79b358813f + e79b358813f gps 2023-09-29T11:36:37.717686Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -1463,13 +1463,13 @@ 5 - {v1}75693259c28 + 75693259c28 gps 2023-09-29T11:36:37.717697Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1538,13 +1538,13 @@ 5 - {v1}316b754124f + 316b754124f gps 2023-09-29T11:36:37.717709Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 22/tcp nvt @@ -1601,13 +1601,13 @@ 5 - {v1}79868c7d9b2 + 79868c7d9b2 gps 2023-09-29T11:36:37.717720Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 22/tcp nvt @@ -1638,13 +1638,13 @@ 
5 - {v1}e3e389ce2ba + e3e389ce2ba gps 2023-09-29T11:36:37.717728Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -1712,13 +1712,13 @@ 5 - {v1}66ec0c4c6a4 + 66ec0c4c6a4 gps 2023-09-29T11:36:37.717749Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1756,13 +1756,13 @@ 5 - {v1}fec842e796e + fec842e796e gps 2023-09-29T11:36:37.717762Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -1808,13 +1808,13 @@ 5 - {v1}bccd1cd5b97 + bccd1cd5b97 gps 2023-09-29T11:36:37.717769Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1854,13 +1854,13 @@ 5 - {v1}68aaba31879 + 68aaba31879 gps 2023-09-29T11:36:37.717783Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -1909,13 +1909,13 @@ 5 - {v1}4406907af6b + 4406907af6b gps 2023-09-29T11:36:37.717794Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5900/tcp nvt @@ -1951,13 +1951,13 @@ 5 - {v1}1fa3ebb87ec + 1fa3ebb87ec gps 2023-09-29T11:36:37.717806Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 21/tcp nvt @@ -1991,13 +1991,13 @@ 5 - {v1}1fa3ebb87ec + 1fa3ebb87ec gps 2023-09-29T11:36:37.717816Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 2121/tcp nvt @@ -2031,13 +2031,13 @@ 5 - {v1}e79b358813f + e79b358813f gps 2023-09-29T11:36:37.717825Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -2099,13 +2099,13 @@ 5 - {v1}9c322581ba5 + 9c322581ba5 gps 2023-09-29T11:36:37.717836Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -2144,13 +2144,13 @@ 5 - {v1}2b0831858b0 + 2b0831858b0 gps 2023-09-29T11:36:37.717847Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -2192,13 +2192,13 @@ 5 - {v1}55390940921 + 55390940921 gps 2023-09-29T11:36:37.717855Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 22/tcp nvt @@ -2266,13 +2266,13 @@ 5 - {v1}1fe916ed11d + 1fe916ed11d gps 
2023-09-29T11:36:37.717864Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 80/tcp nvt @@ -2320,13 +2320,13 @@ 5 - {v1}101c559718c + 101c559718c gps 2023-09-29T11:36:37.717875Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -2370,13 +2370,13 @@ 5 - {v1}fec842e796e + fec842e796e gps 2023-09-29T11:36:37.717887Z - 192.168.1.1001{v1}b6b9f466d63 + 192.168.1.1001b6b9f466d63 5432/tcp nvt @@ -2500,4 +2500,4 @@ 2023-09-26T13:04:00Z - \ No newline at end of file + diff --git a/unittests/scans/openvas/no_vuln.csv b/unittests/scans/openvas/no_vuln.csv new file mode 100644 index 00000000000..0830a74615a --- /dev/null +++ b/unittests/scans/openvas/no_vuln.csv @@ -0,0 +1 @@ +IP,Hostname,Port,Port Protocol,CVSS,Severity,Solution Type,NVT Name,Summary,Specific Result,NVT OID,CVEs,Task ID,Task Name,Timestamp,Result ID,Impact,Solution,Affected Software/OS,Vulnerability Insight,Vulnerability Detection Method,Product Detection Result,BIDs,CERTs,Other References diff --git a/unittests/scans/openvas/report_using_openVAS_findings_to_combine.csv b/unittests/scans/openvas/report_combine_v2.csv similarity index 100% rename from unittests/scans/openvas/report_using_openVAS_findings_to_combine.csv rename to unittests/scans/openvas/report_combine_v2.csv diff --git a/unittests/scans/openvas/report_detail_v2.csv b/unittests/scans/openvas/report_detail_v2.csv new file mode 100644 index 00000000000..31f5968f2fb --- /dev/null +++ b/unittests/scans/openvas/report_detail_v2.csv @@ -0,0 +1,20 @@ +IP,Hostname,Port,Port Protocol,CVSS,Severity,QoD,Solution Type,NVT Name,Summary,Specific Result,NVT OID,CVEs,Task ID,Task Name,Timestamp,Result ID,Impact,Solution,Affected Software/OS,Vulnerability Insight,Vulnerability Detection Method,Product Detection Result,BIDs,CERTs,Other References,Max Severity EPSS score,Max Severity EPSS percentile +10.99.99.99,server99,42,tcp,9.8,High,80,"VendorFix","Microsoft Windows Multiple Vulnerabilities (KB5062557)","This host is missing 
an important security + update according to Microsoft KB5062557","Vulnerable range: 10.0.17763.0 - 10.0.17763.7557 +File checked: C:\Windows\system32\Ntoskrnl.exe +File version: 10.0.17763.7434 + +",1.3.6.1.4.1.25623.1.0.836484,"CVE-2025-49659,CVE-2025-48823,CVE-2025-49684,CVE-2025-49668,CVE-2025-49744,CVE-2025-49683,CVE-2025-49663,CVE-2025-49725,CVE-2025-49675,CVE-2025-49732,CVE-2025-49722,CVE-2025-49669,CVE-2025-48822,CVE-2025-49740,CVE-2025-49729,CVE-2025-49679,CVE-2025-49667,CVE-2025-49666,CVE-2025-48819,CVE-2025-49742,CVE-2025-49733,CVE-2025-49727,CVE-2025-49680,CVE-2025-49678,CVE-2025-48816,CVE-2025-49673,CVE-2025-49665,CVE-2025-49660,CVE-2025-48821,CVE-2025-48818,CVE-2025-48811,CVE-2025-48806,CVE-2025-48001,CVE-2025-47982,CVE-2025-49753,CVE-2025-49686,CVE-2025-47999,CVE-2025-49730,CVE-2025-49724,CVE-2025-49685,CVE-2025-49681,CVE-2025-49664,CVE-2025-48820,CVE-2025-48817,CVE-2025-48815,CVE-2025-48814,CVE-2025-48808,CVE-2025-48805,CVE-2025-48804,CVE-2025-48803,CVE-2025-48800,CVE-2025-48799,CVE-2025-48003,CVE-2025-48000,CVE-2025-47998,CVE-2025-47996,CVE-2025-47981,CVE-2025-47980,CVE-2025-47975,CVE-2025-47973,CVE-2025-49760,CVE-2025-49726,CVE-2025-49723,CVE-2025-49721,CVE-2025-49716,CVE-2025-36350,CVE-2025-36357,CVE-2025-47991,CVE-2025-49691,CVE-2025-49690,CVE-2025-49689,CVE-2025-49688,CVE-2025-49687,CVE-2025-49676,CVE-2025-49674,CVE-2025-49672,CVE-2025-49671,CVE-2025-49670,CVE-2025-49661,CVE-2025-49658,CVE-2025-49657,CVE-2025-48824,CVE-2025-47987,CVE-2025-47986,CVE-2025-47985,CVE-2025-47984,CVE-2025-47976,CVE-2025-47972,CVE-2025-47971,CVE-2025-47159,CVE-2025-48807,CVE-2025-53789,CVE-2025-49757",4949d3d6-705b-41d5-b494-383860f8c970,"Report",2025-08-22T16:27:22+02:00,ec5f93ff-3447-4171-8485-3b3b3af2edc0,"Successful exploitation allows an attacker + to elevate privileges, execute arbitrary commands, disclose information, + bypass security restrictions, conduct spoofing and denial of service attacks.","The vendor has released updates. 
Please see + the references for more information.","'- Microsoft Windows 10 Version 1809 for 32-bit Systems + + - Microsoft Windows 10 Version 1809 for x64-based Systems + + - Microsoft Windows Server 2019","","Checks if a vulnerable version is present + on the target host. +Details: +Microsoft Windows Multiple Vulnerabilities (KB5062557) +(OID: 1.3.6.1.4.1.25623.1.0.836484) +Version used: 2025-08-15T07:40:49+02:00 +","","","DFN-CERT-2025-2181,DFN-CERT-2025-1825,WID-SEC-2025-1850,WID-SEC-2025-1790,WID-SEC-2025-1495","",0.00143,0.35177 diff --git a/unittests/scans/openvas/report_detail_v2.xml b/unittests/scans/openvas/report_detail_v2.xml new file mode 100644 index 00000000000..bfa03f3a786 --- /dev/null +++ b/unittests/scans/openvas/report_detail_v2.xml @@ -0,0 +1,217 @@ + + + + admin + + 2025-08-22T15:00:08+02:00 + + 2025-08-22T15:00:08+02:00 + 2025-08-22T17:09:58+02:00 + 0 + 0 + + Report + + + XML + + + + 22.6 + + + severitydescending + + Done + + Report + disable_BruteForce_default + + 0 + Test + + + 100 + + 2025-08-22T15:00:08+02:00 + 2025-08-22T15:01:38+02:00 + Europe/Berlin + CEST + + 1 + 42/tcp10.99.99.999.8High + + + + Microsoft Windows Multiple Vulnerabilities (KB5062557) + + admin + + 2025-08-22T16:55:31+02:00 + + 2025-08-22T16:55:31+02:00 + 10.99.99.99server99 + 42/tcp + + nvt + Microsoft Windows Multiple Vulnerabilities (KB5062557) + Windows : Microsoft Bulletins + 9.8 + + + NVD + 2025-07-08T19:15:38+02:00 + 9.8 + CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H + + + cvss_base_vector=CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H|summary=This host is missing an important security + update according to Microsoft KB5062557|insight=|affected=- Microsoft Windows 10 Version 1809 for 32-bit Systems + + - Microsoft Windows 10 Version 1809 for x64-based Systems + + - Microsoft Windows Server 2019|impact=Successful exploitation allows an attacker + to elevate privileges, execute arbitrary commands, disclose information, + bypass security restrictions, conduct spoofing 
and denial of service attacks.|solution=The vendor has released updates. Please see + the references for more information.|vuldetect=Checks if a vulnerable version is present + on the target host.|solution_type=VendorFix + The vendor has released updates. Please see + the references for more information. + + + 0.00143 + 0.35177 + + 9.8 + + + + 0.09023 + 0.92284 + + 7.5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2025-08-15T07:40:49+02:00 + High + 9.8 + + 80 + + + Vulnerable range: 10.0.17763.0 - 10.0.17763.7557 +File checked: C:\Windows\system32\Ntoskrnl.exe +File version: 10.0.17763.7434 + + + High + 9.8 + undefined + + + 2025-08-22T17:09:57+02:00 + + diff --git a/unittests/scans/openvas/report_many_v2.csv b/unittests/scans/openvas/report_many_v2.csv new file mode 100644 index 00000000000..cd285fcc12f --- /dev/null +++ b/unittests/scans/openvas/report_many_v2.csv @@ -0,0 +1,614 @@ +IP,Hostname,Port,Port Protocol,CVSS,Severity,QoD,Solution Type,NVT Name,Summary,Specific Result,NVT OID,CVEs,Task ID,Task Name,Timestamp,Result ID,Impact,Solution,Affected Software/OS,Vulnerability Insight,Vulnerability Detection Method,Product Detection Result,BIDs,CERTs,Other References,Max Severity EPSS score,Max Severity EPSS percentile + 10.63.152.16,l-1ansr01.bzdtst.XX.de,8443,tcp,5.8,Medium,99,"Mitigation","HTTP Debugging Methods (TRACE/TRACK) Enabled","The remote web server supports the TRACE and/or TRACK + methods. 
TRACE and TRACK are HTTP methods which are used to debug web server connections.","The web server has the following HTTP methods enabled: TRACE +",1.3.6.1.4.1.25623.1.0.11213,"CVE-2003-1567,CVE-2004-2320,CVE-2004-2763,CVE-2005-3398,CVE-2006-4683,CVE-2007-3008,CVE-2008-7253,CVE-2009-2823,CVE-2010-0386,CVE-2012-2223,CVE-2014-7883",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:37+02:00,d766a82b-8765-4a23-8a1-89846bf6cbd4,"An attacker may use this flaw to trick your legitimate web + users to give him their credentials.","Disable the TRACE and TRACK methods in your web server + configuration. + + Please see the manual of your web server or the references for more information.","Web servers with enabled TRACE and/or TRACK methods.","It has been shown that web servers supporting this methods + are subject to cross-site-scripting attacks, dubbed XST for Cross-Site-Tracing, when used in + conjunction with various weaknesses in browsers.","Checks if HTTP methods such as TRACE and TRACK are + enabled and can be used. +Details: +HTTP Debugging Methods (TRACE/TRACK) Enabled +(OID: 1.3.6.1.4.1.25623.1.0.11213) +Version used: 2023-08-01T15:29:10+02:00 +","","","DFN-CERT-2021-1825,DFN-CERT-2014-1018,DFN-CERT-2010-0020,CB-K14/0981","",0.82689,0.99199 +10.63.152.16,l-1ansr01.bzdtst.XX.de,8443,tcp,5.8,Medium,99,"Mitigation","HTTP Debugging Methods (TRACE/TRACK) Enabled","The remote web server supports the TRACE and/or TRACK + methods. 
TRACE and TRACK are HTTP methods which are used to debug web server connections.","The web server has the following HTTP methods enabled: TRACE +",1.3.6.1.4.1.25623.1.0.11213,"CVE-2003-1567,CVE-2004-2320,CVE-2004-2763,CVE-2005-3398,CVE-2006-4683,CVE-2007-3008,CVE-2008-7253,CVE-2009-2823,CVE-2010-0386,CVE-2012-2223,CVE-2014-7883",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:37+02:00,a8b81531-f2b7-4dd5-ad11-245141e96676,"An attacker may use this flaw to trick your legitimate web + users to give him their credentials.","Disable the TRACE and TRACK methods in your web server + configuration. + + Please see the manual of your web server or the references for more information.","Web servers with enabled TRACE and/or TRACK methods.","It has been shown that web servers supporting this methods + are subject to cross-site-scripting attacks, dubbed XST for Cross-Site-Tracing, when used in + conjunction with various weaknesses in browsers.","Checks if HTTP methods such as TRACE and TRACK are + enabled and can be used. +Details: +HTTP Debugging Methods (TRACE/TRACK) Enabled +(OID: 1.3.6.1.4.1.25623.1.0.11213) +Version used: 2023-08-01T15:29:10+02:00 +","","","DFN-CERT-2021-1825,DFN-CERT-2014-1018,DFN-CERT-2010-0020,CB-K14/0981","",0.82689,0.99199 +10.63.152.41,l-1bcksrv1.bzdtst.XX.de,80,tcp,5.8,Medium,99,"Mitigation","HTTP Debugging Methods (TRACE/TRACK) Enabled","The remote web server supports the TRACE and/or TRACK + methods. 
TRACE and TRACK are HTTP methods which are used to debug web server connections.","The web server has the following HTTP methods enabled: TRACE +",1.3.6.1.4.1.25623.1.0.11213,"CVE-2003-1567,CVE-2004-2320,CVE-2004-2763,CVE-2005-3398,CVE-2006-4683,CVE-2007-3008,CVE-2008-7253,CVE-2009-2823,CVE-2010-0386,CVE-2012-2223,CVE-2014-7883",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:40+02:00,c921a4d-bb34-4c80-8b88-0fbfa9a51cbb,"An attacker may use this flaw to trick your legitimate web + users to give him their credentials.","Disable the TRACE and TRACK methods in your web server + configuration. + + Please see the manual of your web server or the references for more information.","Web servers with enabled TRACE and/or TRACK methods.","It has been shown that web servers supporting this methods + are subject to cross-site-scripting attacks, dubbed XST for Cross-Site-Tracing, when used in + conjunction with various weaknesses in browsers.","Checks if HTTP methods such as TRACE and TRACK are + enabled and can be used. 
+Details: +HTTP Debugging Methods (TRACE/TRACK) Enabled +(OID: 1.3.6.1.4.1.25623.1.0.11213) +Version used: 2023-08-01T15:29:10+02:00 +","","","DFN-CERT-2021-1825,DFN-CERT-2014-1018,DFN-CERT-2010-0020,CB-K14/0981","",0.82689,0.99199 +10.63.152.9,l-1ssms01.bzdtst.XX.de,8443,tcp,5.0,Medium,99,"Mitigation","SSL/TLS: Known Untrusted / Dangerous Certificate Authority (CA) Detection","The service is using an SSL/TLS certificate from a known + untrusted and/or dangerous certificate authority (CA).","The certificate of the remote service is signed by the following untrusted and/or dangerous CA: + +Issuer: CN=localhost,OU=ELO,O=Cisco,L=Cary,ST=NC,C=US + +Certificate details: +fingerABCnt (SHA-1) | 60222AD577271604EFEA8E31FD765F20BB71B72B +fingerABCnt (SHA-256) | E456E1F908F6C27F77B06310F797E34A081172A1CD4FA029D141B099EC429340 +issued by | CN=localhost,OU=ELO,O=Cisco,L=Cary,ST=NC,C=US +public key algorithm | RSA +public key size (bits) | 2048 +serial | 2A7EE654F54609A5 +signature algorithm | sha256WithRSAEncryption +subject | CN=localhost,OU=ELO,O=Cisco,L=Cary,ST=NC,C=US +subject alternative names (SAN) | localhost +valid from | 2025-02-10 07:23:18 UTC +valid until | 2026-02-10 07:23:18 UTC +",1.3.6.1.4.1.25623.1.0.113054,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:44:56+02:00,053e6331-484c-4d45-ba14-6c11422fc588,"An attacker could use this for man-in-the-middle (MITM) attacks, + accessing sensible data and other attacks.","Replace the SSL/TLS certificate with one signed by a trusted + CA.","","","The script reads the certificate used by the target host and + checks if it was signed by a known untrusted and/or dangerous CA. 
+Details: +SSL/TLS: Known Untrusted / Dangerous Certificate Authority (CA) Detection +(OID: 1.3.6.1.4.1.25623.1.0.113054) +Version used: 2024-06-14T07:05:48+02:00 +","Product: cpe:/a:ietf:transport_layer_security +Method: SSL/TLS: Collect and Report Certificate Details +(OID: 1.3.6.1.4.1.25623.1.0.103692) +","","","",, +10.63.152.21,l-1cmXX01.bzdtst.XX.de,135,tcp,5.0,Medium,80,"Mitigation","DCE/RPC and MSRPC Services Enumeration Reporting","Distributed Computing Environment / Remote Procedure Calls (DCE/RPC) or MSRPC services running + on the remote host can be enumerated by connecting on port 135 and doing the approABCate queries.","Here is the list of DCE/RPC or MSRPC services running on this host via the TCP protocol: + +Port: 49664/tcp + + UUID: 12345778-1234-abcd-ef00-0123456789ac, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49664] + Named pipe : lsass + Win32 service or process : lsass.exe + Description : SAM access + + UUID: 51a227ae-825b-41f2-b4a9-1ac9557a1018, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49664] + Annotation: Ngc Pop Key Service + + UUID: 8fb74744-b2ff-4c00-be0d-9ef9a191fe1b, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49664] + Annotation: Ngc Pop Key Service + + UUID: b25a52bf-e5dd-4f4a-aea6-8ca7272a0e86, version 2 + Endpoint: ncacn_ip_tcp:10.63.152.21[49664] + Annotation: KeyIso + +Port: 49665/tcp + + UUID: d95afe70-a6d5-4259-822e-2c84da1dXX0d, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49665] + +Port: 49666/tcp + + UUID: f6beaff7-1e19-4fbb-9f8f-b89e2018337c, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49666] + Annotation: Event log TCPIP + +Port: 49667/tcp + + UUID: 3a9ef155-691d-4449-8d05-09ad57031823, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49667] + + UUID: 8615949-83c9-4044-b424-XX363231fd0c, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49667] + +Port: 49668/tcp + + UUID: 0b6eXXfa-4a24-4fc6-8a23-942b1eca65d1, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49668] + + UUID: 
12345678-1234-abcd-ef00-0123456789ab, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49668] + Named pipe : spoolss + Win32 service or process : spoolsv.exe + Description : Spooler service + + UUID: 4a452661-8290-4b36-8fbe-7f4093a94978, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49668] + + UUID: 76f03f96-cdfd-44fc-a22c-64950a001209, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49668] + + UUID: ae33069b-a2a8-46ee-a235-ddf139be281, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[49668] + +Port: 59492/tcp + + UUID: 6b5bdd1e-528c-422c-af8c-a4079be4fe48, version 1 + Endpoint: ncacn_ip_tcp:10.63.152.21[59492] + Annotation: Remote Fw APIs + +Port: 59494/tcp + + UUID: 367abb81-9844-35f1-a12-98f038001003, version 2 + Endpoint: ncacn_ip_tcp:10.63.152.21[59494] + +Note: DCE/RPC or MSRPC services running on this host locally were identified. Reporting this list is not enabled by default due to the possible large size of this list. See the script preferences to enable this reporting. +",1.3.6.1.4.1.25623.1.0.10736,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:08:49+02:00,5e84cc6e-df7e-4beb-981f-5f9c22f6482e,"An attacker may use this fact to gain more knowledge + about the remote host.","Filter incoming traffic to this ports.","",""," +Details: +DCE/RPC and MSRPC Services Enumeration Reporting +(OID: 1.3.6.1.4.1.25623.1.0.10736) +Version used: 2022-06-03T12:17:07+02:00 +","","","","",, +10.63.152.41,l-1bcksrv1.bzdtst.XX.de,80,tcp,4.8,Medium,80,"Workaround","Cleartext Transmission of Sensitive Information via HTTP","The host / application transmits sensitive information (username, passwords) in + cleartext via HTTP.","The following URLs requires Basic Authentication (URL:realm name): + +http://l-1bcksrv1.bzdtst.XX.de/:""Protected"" +",1.3.6.1.4.1.25623.1.0.108440,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:40+02:00,12e0104e-a1da-4819-8c11-ef4c57d0e561,"An attacker could use this situation to compromise or 
eavesdrop on the + HTTP communication between the client and the server using a man-in-the-middle attack to get access to + sensitive data like usernames or passwords.","Enforce the transmission of sensitive data via an encrypted SSL/TLS connection. + Additionally make sure the host / application is redirecting all users to the secured SSL/TLS connection before + allowing to input sensitive data into the mentioned functions.","Hosts / applications which doesn't enforce the transmission of sensitive data via an + encrypted SSL/TLS connection.","","Evaluate previous collected information and check if the host / application is not + enforcing the transmission of sensitive data via an encrypted SSL/TLS connection. + + The script is currently checking the following: + + - HTTP Basic Authentication (Basic Auth) + + - HTTP Forms (e.g. Login) with input field of type 'password' +Details: +Cleartext Transmission of Sensitive Information via HTTP +(OID: 1.3.6.1.4.1.25623.1.0.108440) +Version used: 2023-09-07T07:05:21+02:00 +","","","","",, +10.63.152.202,l-1esx02-b.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 +Fixed version: 3.4.26 + +",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:46+02:00,5c04507c-1d00-4327-b50f-8f7fb23d6de9,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) + associated to a lease when Keys parameter is true, even a user doesn't have read permission to + the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. +Details: +etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... 
+(OID: 1.3.6.1.4.1.25623.1.0.149673) +Version used: 2023-10-12T07:05:32+02:00 +","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 +10.63.152.203,l-1esx03-b.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 +Fixed version: 3.4.26 + +",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:49:35+02:00,38a2f8a7-c098-4ac7-af84-85f0fbf206f9,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) + associated to a lease when Keys parameter is true, even a user doesn't have read permission to + the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. +Details: +etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... +(OID: 1.3.6.1.4.1.25623.1.0.149673) +Version used: 2023-10-12T07:05:32+02:00 +","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 +10.63.152.204,l-1esx04-b.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 +Fixed version: 3.4.26 + +",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:10:24+02:00,66cf2b76-6441-4ece-b2af-6761e387df0c,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) + associated to a lease when Keys parameter is true, even a user doesn't have read permission to + the keys. 
The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. +Details: +etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... +(OID: 1.3.6.1.4.1.25623.1.0.149673) +Version used: 2023-10-12T07:05:32+02:00 +","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 +10.63.152.102,l-1esx02.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 +Fixed version: 3.4.26 + +",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:01:32+02:00,ff414d86-3478-415a-a349-bc91910e51b9,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) + associated to a lease when Keys parameter is true, even a user doesn't have read permission to + the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. +Details: +etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... 
+(OID: 1.3.6.1.4.1.25623.1.0.149673) +Version used: 2023-10-12T07:05:32+02:00 +","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 +10.63.152.103,l-1esx03.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 +Fixed version: 3.4.26 + +",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:06:08+02:00,e040081f-bab5-470e-8e50-f7f63a61b1c2,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) + associated to a lease when Keys parameter is true, even a user doesn't have read permission to + the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. +Details: +etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... +(OID: 1.3.6.1.4.1.25623.1.0.149673) +Version used: 2023-10-12T07:05:32+02:00 +","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 +10.63.152.104,l-1esx04.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 +Fixed version: 3.4.26 + +",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:59:13+02:00,1188c31e-8e34-4520-b4cb-cc1b5ef62e21,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) + associated to a lease when Keys parameter is true, even a user doesn't have read permission to + the keys. 
The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. +Details: +etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... +(OID: 1.3.6.1.4.1.25623.1.0.149673) +Version used: 2023-10-12T07:05:32+02:00 +","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 +10.63.152.100,l-1vcsa01.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 1497783025 +Packet 2: 1497784105 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:46:45+02:00,2eb74ccf-bfb8-4e9a-8df3-67b70bd52ba1,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.102,l-1esx02.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 2895610065 +Packet 2: 405332881 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:01:32+02:00,54dc8d63-eb80-4196-89b4-fa9d6387ee2c,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.103,l-1esx03.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 2934590030 +Packet 2: 1131840991 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:06:08+02:00,b1581415-59e2-4847-b867-39d1878c233e,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.202,l-1esx02-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 1740125501 +Packet 2: 2510433761 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:46+02:00,73fa94c7-5b05-4196-8c7f-ade387379867,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.203,l-1esx03-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 54910807 +Packet 2: 820590328 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:49:35+02:00,a6734c0d-1fd1-49ce-8b00-0271a7b8cb62,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.104,l-1esx04.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 2684704561 +Packet 2: 2831912580 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:59:13+02:00,590XX466-856e-43b7-931e-05648f01942d,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.204,l-1esx04-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 2272643113 +Packet 2: 1342986068 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:10:24+02:00,70e1a404-4ed0-4823-8d8e-d277560123e7,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.201,l-1esx01-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 4279311885 +Packet 2: 120745172 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:43+02:00,24aa905b-b8d4-4a49-9b1d-7d6797c4cee2,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.101,l-1esx01.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 3630835335 +Packet 2: 2057814760 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T04:02:37+02:00,2b9ffca6-7134-467e-8c95-a385b8cfd20e,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.21,l-1cmXX01.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows + to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. + +The following timestamps were retrieved with a delay of 1 seconds in-between: +Packet 1: 762318062 +Packet 2: 762319141 +",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:08:49+02:00,423b0e90-7407-4d6e-b0fc-3a4039635000,"A side effect of this feature is that the uptime of the remote + host can sometimes be computed.","To disable TCP timestamps on linux add the line + 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at + runtime. + + To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' + + Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. + + The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when + initiating TCP connections, but use them if the TCP peer that is initiating communication includes + them in their synchronize (SYN) segment. + + See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by + RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in + between to the target IP. The responses are searched for a timestamps. If found, the timestamps + are reported. 
+Details: +TCP Timestamps Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.80091) +Version used: 2023-12-15T17:10:08+02:00 +","","","","",, +10.63.152.16,l-1ansr01.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: +- ICMP Type: 14 +- ICMP Code: 0 +",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:37+02:00,c670f102-1614-40fd-b1e1-9df7751a98d8,"This information could theoretically be used to exploit weak + time-based random number generators in other services.","Various mitigations are possible: + + - Disable the support for ICMP timestamp on the remote host completely + + - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in + either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a + Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as + well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a + Timestamp Reply (Type 14) is received. 
+Details: +ICMP Timestamp Reply Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.103190) +Version used: 2025-01-21T06:37:33+02:00 +","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 +10.63.152.199,l-1moni04.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: +- ICMP Type: 14 +- ICMP Code: 0 +",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:06:22+02:00,7fd03e88-91c5-493f-a5dd-21f1b424c309,"This information could theoretically be used to exploit weak + time-based random number generators in other services.","Various mitigations are possible: + + - Disable the support for ICMP timestamp on the remote host completely + + - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in + either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a + Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as + well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a + Timestamp Reply (Type 14) is received. 
+Details: +ICMP Timestamp Reply Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.103190) +Version used: 2025-01-21T06:37:33+02:00 +","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 +10.63.152.41,l-1bcksrv1.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: +- ICMP Type: 14 +- ICMP Code: 0 +",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:40+02:00,23e24245-749e-4c1d-a7c9-1eb14XXbb550,"This information could theoretically be used to exploit weak + time-based random number generators in other services.","Various mitigations are possible: + + - Disable the support for ICMP timestamp on the remote host completely + + - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in + either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a + Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as + well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a + Timestamp Reply (Type 14) is received. 
+Details: +ICMP Timestamp Reply Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.103190) +Version used: 2025-01-21T06:37:33+02:00 +","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 +10.63.152.9,l-1ssms01.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: +- ICMP Type: 14 +- ICMP Code: 0 +",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:44:56+02:00,76cb06b0-29a2-493a-a895-add4b3bbd5bb,"This information could theoretically be used to exploit weak + time-based random number generators in other services.","Various mitigations are possible: + + - Disable the support for ICMP timestamp on the remote host completely + + - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in + either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a + Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as + well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a + Timestamp Reply (Type 14) is received. 
+Details: +ICMP Timestamp Reply Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.103190) +Version used: 2025-01-21T06:37:33+02:00 +","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 +10.63.152.21,l-1cmXX01.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: +- ICMP Type: 14 +- ICMP Code: 0 +",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:08:49+02:00,6e8e6ab2-5ae1-44e8-b6bf-f23a50674671,"This information could theoretically be used to exploit weak + time-based random number generators in other services.","Various mitigations are possible: + + - Disable the support for ICMP timestamp on the remote host completely + + - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in + either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a + Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as + well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a + Timestamp Reply (Type 14) is received. 
+Details: +ICMP Timestamp Reply Information Disclosure +(OID: 1.3.6.1.4.1.25623.1.0.103190) +Version used: 2025-01-21T06:37:33+02:00 +","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 diff --git a/unittests/tools/test_openvas_parser_v2.py b/unittests/tools/test_openvas_parser_v2.py index 4d383d544a7..bd8cc7ba738 100644 --- a/unittests/tools/test_openvas_parser_v2.py +++ b/unittests/tools/test_openvas_parser_v2.py @@ -1,135 +1,110 @@ +import io + from dojo.models import Engagement, Product, Test -from dojo.tools.openvas.parser import OpenVASParser +from dojo.tools.openvas_v2.parser import OpenVASParserV2 from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path -class TestOpenVASParser(DojoTestCase): - def test_openvas_csv_one_vuln(self): - with (get_unit_tests_scans_path("openvas") / "one_vuln.csv").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual(1, len(findings)) - # finding - self.assertEqual("SSH Weak Encryption Algorithms Supported", findings[0].title) - self.assertEqual("Medium", findings[0].severity) - # endpoints - self.assertEqual(1, len(findings[0].unsaved_endpoints)) - # endpoint - self.assertEqual("10.0.0.8", findings[0].unsaved_endpoints[0].host) - self.assertEqual("tcp", findings[0].unsaved_endpoints[0].protocol) - self.assertEqual(22, findings[0].unsaved_endpoints[0].port) - - def test_openvas_csv_many_vuln(self): - with (get_unit_tests_scans_path("openvas") / "many_vuln.csv").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual(4, 
len(findings)) - # finding - finding = findings[3] - self.assertEqual("HTTP Brute Force Logins With Default Credentials Reporting", finding.title) - self.assertEqual("High", finding.severity) - # endpoints - self.assertEqual(1, len(finding.unsaved_endpoints)) - # endpoint - endpoint = finding.unsaved_endpoints[0] - self.assertEqual("LOGSRV", endpoint.host) - self.assertEqual("tcp", endpoint.protocol) - self.assertEqual(9200, endpoint.port) - finding = findings[2] - self.assertEqual(finding.unsaved_vulnerability_ids[0], "CVE-2011-3389") - - def test_openvas_csv_report_usingCVE(self): - with (get_unit_tests_scans_path("openvas") / "report_using_CVE.csv").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual(43, len(findings)) - finding = findings[4] - self.assertEqual("CVE-2014-0117", finding.title) - self.assertEqual("Medium", finding.severity) - self.assertEqual(4.3, finding.cvssv3_score) - self.assertEqual(finding.unsaved_vulnerability_ids[0], "CVE-2014-0117") - - def test_openvas_csv_report_usingOpenVAS(self): - with (get_unit_tests_scans_path("openvas") / "report_using_openVAS.csv").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual(13, len(findings)) - finding = findings[2] - self.assertEqual("Apache HTTP Server Detection Consolidation", finding.title) - self.assertEqual("Info", finding.severity) - self.assertEqual(finding.unsaved_vulnerability_ids, []) +def openvas_open(file): + """Helper to get file handle to openvas test files""" + return (get_unit_tests_scans_path("openvas") / 
file).open(encoding="utf-8") - def test_openvas_csv_report_combined_findings(self): - with (get_unit_tests_scans_path("openvas") / "report_using_openVAS_findings_to_combine.csv").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual(1, len(findings)) + +def setup_openvas_v2_test(f): + """Setup helper for general openvas_v2 test setup""" + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParserV2() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + return findings + + +class TestOpenVASParserV2(DojoTestCase): + # test empty cases + def test_openvas_csv_no_vuln(self): + """Ensure that an empty report does not throw and error and reports 0 findings""" + with openvas_open("no_vuln.csv") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(0, len(findings)) def test_openvas_xml_no_vuln(self): - with (get_unit_tests_scans_path("openvas") / "no_vuln.xml").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) + """Ensure that an empty report does not throw and error and reports 0 findings""" + with openvas_open("no_vuln.xml") as f: + findings = setup_openvas_v2_test(f) self.assertEqual(0, len(findings)) - def test_openvas_xml_one_vuln(self): - with (get_unit_tests_scans_path("openvas") / "one_vuln.xml").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - 
endpoint.clean() + def test_openvas_parser_csv_detail(self): + """Ensure finding contains report data as expected""" + with openvas_open("report_detail_v2.csv") as f: + findings = setup_openvas_v2_test(f) + + # ensure single finding + self.assertEqual(len(findings), 1) + finding = findings[0] + + # general finding info tests + self.assertEqual("Microsoft Windows Multiple Vulnerabilities (KB5062557)", finding.title) + self.assertEqual("High", finding.severity) # OpenVAS report Critical findings as High + self.assertEqual(9.8, finding.cvssv3_score) + + # vulnerability id tests + self.assertEqual(finding.vuln_id_from_tool, "1.3.6.1.4.1.25623.1.0.836484") + self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2025-48823") + self.assertEqual(93, len(finding.unsaved_vulnerability_ids)) + + # endpoint tests + self.assertEqual(1, len(finding.unsaved_endpoints)) + self.assertEqual("server99", finding.unsaved_endpoints[0].host) + # this is example data normaly tested finding does not include this + self.assertEqual(42, finding.unsaved_endpoints[0].port) + self.assertEqual("tcp", finding.unsaved_endpoints[0].protocol) + + def test_openvas_parser_csv_xml_parity(self): + """Ensure xml and csv parser parse data that is the same between report in the same way""" + with openvas_open("report_detail_v2.csv") as f: + findings_csv = setup_openvas_v2_test(f) + with openvas_open("report_detail_v2.xml") as f: + findings_xml = setup_openvas_v2_test(f) + + f_xml = findings_xml[0] + f_csv = findings_csv[0] + + # ensure same general finding parsing behaviour + self.assertEqual(f_xml.title, f_csv.title) + self.assertEqual(f_xml.severity, f_csv.severity) + self.assertEqual(f_xml.cvssv3_score, f_csv.cvssv3_score) + # remove this if future parser versions want different description behaviour + self.assertEqual(f_xml.description, f_csv.description) + + # ensure same vulnerability id parsing behaviour + self.assertEqual(f_xml.vuln_id_from_tool, f_csv.vuln_id_from_tool) + # xml has multiple 
types of vulnerability ids, change this if a new one is parsed + self.assertEqual(len(f_xml.unsaved_vulnerability_ids), len(f_csv.unsaved_vulnerability_ids)) + self.assertEqual(f_xml.unsaved_vulnerability_ids, f_csv.unsaved_vulnerability_ids) + + # ensure same endpoint parsing behaviour + self.assertEqual(f_xml.unsaved_endpoints[0].host, f_csv.unsaved_endpoints[0].host) + self.assertEqual(f_xml.unsaved_endpoints[0].protocol, f_csv.unsaved_endpoints[0].protocol) + self.assertEqual(f_xml.unsaved_endpoints[0].port, f_csv.unsaved_endpoints[0].port) + + def test_openvas_csv_report_combined_findings(self): + """Ensure findings combinding behaviour""" + with openvas_open("report_combine_v2.csv") as f: + findings = setup_openvas_v2_test(f) self.assertEqual(1, len(findings)) - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows", finding.title) - self.assertEqual("High", finding.severity) - - def test_openvas_xml_many_vuln(self): - with (get_unit_tests_scans_path("openvas") / "many_vuln.xml").open(encoding="utf-8") as f: - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASParser() - findings = parser.get_findings(f, test) + + def test_openvas_csv_many_findings(self): + """Ensure findings combinding behaviour""" + with openvas_open("many_vuln.csv") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(4, len(findings)) + + def test_openvas_xml_many_findings(self): + """Ensure findings combinding behaviour""" + with openvas_open("many_vuln.xml") as f: + findings = setup_openvas_v2_test(f) self.assertEqual(44, len(findings)) - self.assertEqual(44, len([endpoint for finding in findings for endpoint in finding.unsaved_endpoints])) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - self.assertEqual("tcp://192.168.1.1001:512", str(findings[0].unsaved_endpoints[0])) From 
2ff1d30fb626336e6aceceeb6a2c8183241141a8 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Sat, 30 Aug 2025 16:46:22 +0200 Subject: [PATCH 10/15] bugfix and cosmetic things --- dojo/settings/settings.dist.py | 4 +++- dojo/tools/openvas_v2/common.py | 10 ++++++---- dojo/tools/openvas_v2/csv_parser.py | 8 ++++---- dojo/tools/openvas_v2/parser.py | 6 +++--- dojo/tools/openvas_v2/xml_parser.py | 8 ++++---- unittests/tools/test_openvas_parser_v2.py | 4 ++-- 6 files changed, 22 insertions(+), 18 deletions(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index db43dad1f9e..4e52b029c43 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1356,6 +1356,7 @@ def saml2_attrib_map_format(din): "Qualys Hacker Guardian Scan": ["title", "severity", "description"], "Cyberwatch scan (Galeax)": ["title", "description", "severity"], "Cycognito Scan": ["title", "severity"], + "OpenVAS Parser v2": ["title", "unique_id_from_tool", "vuln_id_from_tool"], } # Override the hardcoded settings here via the env var @@ -1427,6 +1428,7 @@ def saml2_attrib_map_format(din): "HCL AppScan on Cloud SAST XML": True, "AWS Inspector2 Scan": True, "Cyberwatch scan (Galeax)": True, + "OpenVAS Parser v2": True, } # List of fields that are known to be usable in hash_code computation) @@ -1613,7 +1615,7 @@ def saml2_attrib_map_format(din): "Red Hat Satellite": DEDUPE_ALGO_HASH_CODE, "Qualys Hacker Guardian Scan": DEDUPE_ALGO_HASH_CODE, "Cyberwatch scan (Galeax)": DEDUPE_ALGO_HASH_CODE, - "OpenVAS Parser": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, + "OpenVAS Parser v2": DEDUPE_ALGO_HASH_CODE, } # Override the hardcoded settings here via the env var diff --git a/dojo/tools/openvas_v2/common.py b/dojo/tools/openvas_v2/common.py index 7d42a1facd3..80c7ed00b58 100644 --- a/dojo/tools/openvas_v2/common.py +++ b/dojo/tools/openvas_v2/common.py @@ -1,14 +1,12 @@ import hashlib -import os from dataclasses import dataclass from 
dojo.models import Finding -OPENVAS_SEVERITY_OVERWRITE = os.environ.get("OPENVAS_SEVERITY_OVERWRITE", "False").lower() in {"true", 1} - @dataclass class OpenVASFindingAuxData: + """Dataclass to contain all information added later to fields""" summary: str = "" @@ -21,12 +19,16 @@ def is_valid_severity(severity): return severity in valid_severity +def cleanup_openvas_text(text: str): + return text.replace("\n ", " ") + + def update_finding(finding: Finding, aux_info: OpenVASFindingAuxData): """Update finding description""" if aux_info.openvas_result: finding.steps_to_reproduce = aux_info.openvas_result if aux_info.summary: - finding.description += f"\n**Summary**: {aux_info.summary}" + finding.description += f"\n**Summary**: {cleanup_openvas_text(aux_info.summary)}" if aux_info.qod: finding.description += f"\n**QoD**: {aux_info.qod}" diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py index 3db22fee454..ae55f118597 100644 --- a/dojo/tools/openvas_v2/csv_parser.py +++ b/dojo/tools/openvas_v2/csv_parser.py @@ -4,7 +4,7 @@ from dateutil.parser import parse from dojo.models import Endpoint, Finding -from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding +from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding, cleanup_openvas_text def evaluate_bool_value(column_value): @@ -84,11 +84,11 @@ def process_column_element( elif column_name == "cvss": finding.cvssv3_score = float(column_value) elif column_name == "summary": - finding.description = column_value + aux_info.summary = column_value elif column_name == "solution": - finding.mitigation = column_value + finding.mitigation = cleanup_openvas_text(column_value) elif column_name == "vulnerability insight": - finding.impact = column_value + finding.impact = cleanup_openvas_text(column_value) elif column_name == "specific result": aux_info.openvas_result = column_value elif 
column_name == "qod": diff --git a/dojo/tools/openvas_v2/parser.py b/dojo/tools/openvas_v2/parser.py index 13cab942373..0d7a815bd61 100644 --- a/dojo/tools/openvas_v2/parser.py +++ b/dojo/tools/openvas_v2/parser.py @@ -2,12 +2,12 @@ from dojo.tools.openvas_v2.xml_parser import OpenVASXMLParserV2 -class OpenVASParserV2: +class OpenVASV2Parser: def get_scan_types(self): - return ["OpenVAS Parser V2"] + return ["OpenVAS Parser v2"] def get_label_for_scan_types(self, scan_type): - return scan_type # no custom label for now + return scan_type def get_description_for_scan_types(self, scan_type): return "Import CSV or XML output of Greenbone OpenVAS report." diff --git a/dojo/tools/openvas_v2/xml_parser.py b/dojo/tools/openvas_v2/xml_parser.py index 51a75f8fe2c..61c83294887 100644 --- a/dojo/tools/openvas_v2/xml_parser.py +++ b/dojo/tools/openvas_v2/xml_parser.py @@ -4,7 +4,7 @@ from defusedxml import ElementTree from dojo.models import Endpoint, Finding -from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding +from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding, cleanup_openvas_text class OpenVASXMLParserV2: @@ -67,11 +67,11 @@ def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindin tags = self.parse_nvt_tags(tag_field.text) summary = tags.get("summary", None) if summary: - finding.description = summary + aux_info.summary = summary impact = tags.get("impact", None) if impact: - finding.impact = impact + finding.impact = cleanup_openvas_text(impact) # parse cves refs_node = field.find("refs") @@ -108,4 +108,4 @@ def process_field_element(self, field, finding: Finding, aux_info: OpenVASFindin elif field.tag == "description": aux_info.openvas_result = field.text.strip() elif field.tag == "solution": - finding.mitigation = field.text + finding.mitigation = cleanup_openvas_text(field.text) diff --git 
a/unittests/tools/test_openvas_parser_v2.py b/unittests/tools/test_openvas_parser_v2.py index bd8cc7ba738..8378d7fb1e9 100644 --- a/unittests/tools/test_openvas_parser_v2.py +++ b/unittests/tools/test_openvas_parser_v2.py @@ -1,7 +1,7 @@ import io from dojo.models import Engagement, Product, Test -from dojo.tools.openvas_v2.parser import OpenVASParserV2 +from dojo.tools.openvas_v2.parser import OpenVASV2Parser from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path @@ -15,7 +15,7 @@ def setup_openvas_v2_test(f): test = Test() test.engagement = Engagement() test.engagement.product = Product() - parser = OpenVASParserV2() + parser = OpenVASV2Parser() findings = parser.get_findings(f, test) for finding in findings: for endpoint in finding.unsaved_endpoints: From f8a64755783d9581e3cf70761a8803612cc758fb Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Sat, 30 Aug 2025 17:08:10 +0200 Subject: [PATCH 11/15] fix lint --- dojo/tools/openvas_v2/csv_parser.py | 8 +++++++- dojo/tools/openvas_v2/xml_parser.py | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py index ae55f118597..345fd736b5f 100644 --- a/dojo/tools/openvas_v2/csv_parser.py +++ b/dojo/tools/openvas_v2/csv_parser.py @@ -4,7 +4,13 @@ from dateutil.parser import parse from dojo.models import Endpoint, Finding -from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding, cleanup_openvas_text +from dojo.tools.openvas_v2.common import ( + OpenVASFindingAuxData, + cleanup_openvas_text, + deduplicate, + is_valid_severity, + update_finding, +) def evaluate_bool_value(column_value): diff --git a/dojo/tools/openvas_v2/xml_parser.py b/dojo/tools/openvas_v2/xml_parser.py index 61c83294887..6ff3ab48ced 100644 --- a/dojo/tools/openvas_v2/xml_parser.py +++ b/dojo/tools/openvas_v2/xml_parser.py @@ -4,7 +4,13 @@ from 
defusedxml import ElementTree from dojo.models import Endpoint, Finding -from dojo.tools.openvas_v2.common import OpenVASFindingAuxData, deduplicate, is_valid_severity, update_finding, cleanup_openvas_text +from dojo.tools.openvas_v2.common import ( + OpenVASFindingAuxData, + cleanup_openvas_text, + deduplicate, + is_valid_severity, + update_finding, +) class OpenVASXMLParserV2: From c1d826fe7f76d6338338e0ac25f12a8845ddc6d5 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Sat, 30 Aug 2025 17:17:39 +0200 Subject: [PATCH 12/15] fix lint --- dojo/tools/openvas_v2/csv_parser.py | 2 +- unittests/scans/openvas/report_many_v2.csv | 614 --------------------- unittests/tools/test_openvas_parser_v2.py | 2 - 3 files changed, 1 insertion(+), 617 deletions(-) delete mode 100644 unittests/scans/openvas/report_many_v2.csv diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py index 345fd736b5f..aed2cf4c1f4 100644 --- a/dojo/tools/openvas_v2/csv_parser.py +++ b/dojo/tools/openvas_v2/csv_parser.py @@ -33,7 +33,7 @@ def get_findings(self, filename, test): column_names = [column_name.lower() for column_name in next(csv_reader) if column_name] if "nvt name" not in column_names: - raise "This doesn't seem to be a valid Greenbone/ OpenVAS csv file." 
+ raise Exception("Not a valid Greenbone/ OpenVAS csv file.") for row in csv_reader: finding = Finding(test=test, dynamic_finding=True, static_finding=False, severity="Info") diff --git a/unittests/scans/openvas/report_many_v2.csv b/unittests/scans/openvas/report_many_v2.csv deleted file mode 100644 index cd285fcc12f..00000000000 --- a/unittests/scans/openvas/report_many_v2.csv +++ /dev/null @@ -1,614 +0,0 @@ -IP,Hostname,Port,Port Protocol,CVSS,Severity,QoD,Solution Type,NVT Name,Summary,Specific Result,NVT OID,CVEs,Task ID,Task Name,Timestamp,Result ID,Impact,Solution,Affected Software/OS,Vulnerability Insight,Vulnerability Detection Method,Product Detection Result,BIDs,CERTs,Other References,Max Severity EPSS score,Max Severity EPSS percentile - 10.63.152.16,l-1ansr01.bzdtst.XX.de,8443,tcp,5.8,Medium,99,"Mitigation","HTTP Debugging Methods (TRACE/TRACK) Enabled","The remote web server supports the TRACE and/or TRACK - methods. TRACE and TRACK are HTTP methods which are used to debug web server connections.","The web server has the following HTTP methods enabled: TRACE -",1.3.6.1.4.1.25623.1.0.11213,"CVE-2003-1567,CVE-2004-2320,CVE-2004-2763,CVE-2005-3398,CVE-2006-4683,CVE-2007-3008,CVE-2008-7253,CVE-2009-2823,CVE-2010-0386,CVE-2012-2223,CVE-2014-7883",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:37+02:00,d766a82b-8765-4a23-8a1-89846bf6cbd4,"An attacker may use this flaw to trick your legitimate web - users to give him their credentials.","Disable the TRACE and TRACK methods in your web server - configuration. - - Please see the manual of your web server or the references for more information.","Web servers with enabled TRACE and/or TRACK methods.","It has been shown that web servers supporting this methods - are subject to cross-site-scripting attacks, dubbed XST for Cross-Site-Tracing, when used in - conjunction with various weaknesses in browsers.","Checks if HTTP methods such as TRACE and TRACK are - enabled and can be used. 
-Details: -HTTP Debugging Methods (TRACE/TRACK) Enabled -(OID: 1.3.6.1.4.1.25623.1.0.11213) -Version used: 2023-08-01T15:29:10+02:00 -","","","DFN-CERT-2021-1825,DFN-CERT-2014-1018,DFN-CERT-2010-0020,CB-K14/0981","",0.82689,0.99199 -10.63.152.16,l-1ansr01.bzdtst.XX.de,8443,tcp,5.8,Medium,99,"Mitigation","HTTP Debugging Methods (TRACE/TRACK) Enabled","The remote web server supports the TRACE and/or TRACK - methods. TRACE and TRACK are HTTP methods which are used to debug web server connections.","The web server has the following HTTP methods enabled: TRACE -",1.3.6.1.4.1.25623.1.0.11213,"CVE-2003-1567,CVE-2004-2320,CVE-2004-2763,CVE-2005-3398,CVE-2006-4683,CVE-2007-3008,CVE-2008-7253,CVE-2009-2823,CVE-2010-0386,CVE-2012-2223,CVE-2014-7883",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:37+02:00,a8b81531-f2b7-4dd5-ad11-245141e96676,"An attacker may use this flaw to trick your legitimate web - users to give him their credentials.","Disable the TRACE and TRACK methods in your web server - configuration. - - Please see the manual of your web server or the references for more information.","Web servers with enabled TRACE and/or TRACK methods.","It has been shown that web servers supporting this methods - are subject to cross-site-scripting attacks, dubbed XST for Cross-Site-Tracing, when used in - conjunction with various weaknesses in browsers.","Checks if HTTP methods such as TRACE and TRACK are - enabled and can be used. -Details: -HTTP Debugging Methods (TRACE/TRACK) Enabled -(OID: 1.3.6.1.4.1.25623.1.0.11213) -Version used: 2023-08-01T15:29:10+02:00 -","","","DFN-CERT-2021-1825,DFN-CERT-2014-1018,DFN-CERT-2010-0020,CB-K14/0981","",0.82689,0.99199 -10.63.152.41,l-1bcksrv1.bzdtst.XX.de,80,tcp,5.8,Medium,99,"Mitigation","HTTP Debugging Methods (TRACE/TRACK) Enabled","The remote web server supports the TRACE and/or TRACK - methods. 
TRACE and TRACK are HTTP methods which are used to debug web server connections.","The web server has the following HTTP methods enabled: TRACE -",1.3.6.1.4.1.25623.1.0.11213,"CVE-2003-1567,CVE-2004-2320,CVE-2004-2763,CVE-2005-3398,CVE-2006-4683,CVE-2007-3008,CVE-2008-7253,CVE-2009-2823,CVE-2010-0386,CVE-2012-2223,CVE-2014-7883",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:40+02:00,c921a4d-bb34-4c80-8b88-0fbfa9a51cbb,"An attacker may use this flaw to trick your legitimate web - users to give him their credentials.","Disable the TRACE and TRACK methods in your web server - configuration. - - Please see the manual of your web server or the references for more information.","Web servers with enabled TRACE and/or TRACK methods.","It has been shown that web servers supporting this methods - are subject to cross-site-scripting attacks, dubbed XST for Cross-Site-Tracing, when used in - conjunction with various weaknesses in browsers.","Checks if HTTP methods such as TRACE and TRACK are - enabled and can be used. 
-Details: -HTTP Debugging Methods (TRACE/TRACK) Enabled -(OID: 1.3.6.1.4.1.25623.1.0.11213) -Version used: 2023-08-01T15:29:10+02:00 -","","","DFN-CERT-2021-1825,DFN-CERT-2014-1018,DFN-CERT-2010-0020,CB-K14/0981","",0.82689,0.99199 -10.63.152.9,l-1ssms01.bzdtst.XX.de,8443,tcp,5.0,Medium,99,"Mitigation","SSL/TLS: Known Untrusted / Dangerous Certificate Authority (CA) Detection","The service is using an SSL/TLS certificate from a known - untrusted and/or dangerous certificate authority (CA).","The certificate of the remote service is signed by the following untrusted and/or dangerous CA: - -Issuer: CN=localhost,OU=ELO,O=Cisco,L=Cary,ST=NC,C=US - -Certificate details: -fingerABCnt (SHA-1) | 60222AD577271604EFEA8E31FD765F20BB71B72B -fingerABCnt (SHA-256) | E456E1F908F6C27F77B06310F797E34A081172A1CD4FA029D141B099EC429340 -issued by | CN=localhost,OU=ELO,O=Cisco,L=Cary,ST=NC,C=US -public key algorithm | RSA -public key size (bits) | 2048 -serial | 2A7EE654F54609A5 -signature algorithm | sha256WithRSAEncryption -subject | CN=localhost,OU=ELO,O=Cisco,L=Cary,ST=NC,C=US -subject alternative names (SAN) | localhost -valid from | 2025-02-10 07:23:18 UTC -valid until | 2026-02-10 07:23:18 UTC -",1.3.6.1.4.1.25623.1.0.113054,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:44:56+02:00,053e6331-484c-4d45-ba14-6c11422fc588,"An attacker could use this for man-in-the-middle (MITM) attacks, - accessing sensible data and other attacks.","Replace the SSL/TLS certificate with one signed by a trusted - CA.","","","The script reads the certificate used by the target host and - checks if it was signed by a known untrusted and/or dangerous CA. 
-Details: -SSL/TLS: Known Untrusted / Dangerous Certificate Authority (CA) Detection -(OID: 1.3.6.1.4.1.25623.1.0.113054) -Version used: 2024-06-14T07:05:48+02:00 -","Product: cpe:/a:ietf:transport_layer_security -Method: SSL/TLS: Collect and Report Certificate Details -(OID: 1.3.6.1.4.1.25623.1.0.103692) -","","","",, -10.63.152.21,l-1cmXX01.bzdtst.XX.de,135,tcp,5.0,Medium,80,"Mitigation","DCE/RPC and MSRPC Services Enumeration Reporting","Distributed Computing Environment / Remote Procedure Calls (DCE/RPC) or MSRPC services running - on the remote host can be enumerated by connecting on port 135 and doing the approABCate queries.","Here is the list of DCE/RPC or MSRPC services running on this host via the TCP protocol: - -Port: 49664/tcp - - UUID: 12345778-1234-abcd-ef00-0123456789ac, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49664] - Named pipe : lsass - Win32 service or process : lsass.exe - Description : SAM access - - UUID: 51a227ae-825b-41f2-b4a9-1ac9557a1018, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49664] - Annotation: Ngc Pop Key Service - - UUID: 8fb74744-b2ff-4c00-be0d-9ef9a191fe1b, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49664] - Annotation: Ngc Pop Key Service - - UUID: b25a52bf-e5dd-4f4a-aea6-8ca7272a0e86, version 2 - Endpoint: ncacn_ip_tcp:10.63.152.21[49664] - Annotation: KeyIso - -Port: 49665/tcp - - UUID: d95afe70-a6d5-4259-822e-2c84da1dXX0d, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49665] - -Port: 49666/tcp - - UUID: f6beaff7-1e19-4fbb-9f8f-b89e2018337c, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49666] - Annotation: Event log TCPIP - -Port: 49667/tcp - - UUID: 3a9ef155-691d-4449-8d05-09ad57031823, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49667] - - UUID: 8615949-83c9-4044-b424-XX363231fd0c, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49667] - -Port: 49668/tcp - - UUID: 0b6eXXfa-4a24-4fc6-8a23-942b1eca65d1, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49668] - - UUID: 
12345678-1234-abcd-ef00-0123456789ab, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49668] - Named pipe : spoolss - Win32 service or process : spoolsv.exe - Description : Spooler service - - UUID: 4a452661-8290-4b36-8fbe-7f4093a94978, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49668] - - UUID: 76f03f96-cdfd-44fc-a22c-64950a001209, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49668] - - UUID: ae33069b-a2a8-46ee-a235-ddf139be281, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[49668] - -Port: 59492/tcp - - UUID: 6b5bdd1e-528c-422c-af8c-a4079be4fe48, version 1 - Endpoint: ncacn_ip_tcp:10.63.152.21[59492] - Annotation: Remote Fw APIs - -Port: 59494/tcp - - UUID: 367abb81-9844-35f1-a12-98f038001003, version 2 - Endpoint: ncacn_ip_tcp:10.63.152.21[59494] - -Note: DCE/RPC or MSRPC services running on this host locally were identified. Reporting this list is not enabled by default due to the possible large size of this list. See the script preferences to enable this reporting. -",1.3.6.1.4.1.25623.1.0.10736,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:08:49+02:00,5e84cc6e-df7e-4beb-981f-5f9c22f6482e,"An attacker may use this fact to gain more knowledge - about the remote host.","Filter incoming traffic to this ports.","",""," -Details: -DCE/RPC and MSRPC Services Enumeration Reporting -(OID: 1.3.6.1.4.1.25623.1.0.10736) -Version used: 2022-06-03T12:17:07+02:00 -","","","","",, -10.63.152.41,l-1bcksrv1.bzdtst.XX.de,80,tcp,4.8,Medium,80,"Workaround","Cleartext Transmission of Sensitive Information via HTTP","The host / application transmits sensitive information (username, passwords) in - cleartext via HTTP.","The following URLs requires Basic Authentication (URL:realm name): - -http://l-1bcksrv1.bzdtst.XX.de/:""Protected"" -",1.3.6.1.4.1.25623.1.0.108440,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:40+02:00,12e0104e-a1da-4819-8c11-ef4c57d0e561,"An attacker could use this situation to compromise or 
eavesdrop on the - HTTP communication between the client and the server using a man-in-the-middle attack to get access to - sensitive data like usernames or passwords.","Enforce the transmission of sensitive data via an encrypted SSL/TLS connection. - Additionally make sure the host / application is redirecting all users to the secured SSL/TLS connection before - allowing to input sensitive data into the mentioned functions.","Hosts / applications which doesn't enforce the transmission of sensitive data via an - encrypted SSL/TLS connection.","","Evaluate previous collected information and check if the host / application is not - enforcing the transmission of sensitive data via an encrypted SSL/TLS connection. - - The script is currently checking the following: - - - HTTP Basic Authentication (Basic Auth) - - - HTTP Forms (e.g. Login) with input field of type 'password' -Details: -Cleartext Transmission of Sensitive Information via HTTP -(OID: 1.3.6.1.4.1.25623.1.0.108440) -Version used: 2023-09-07T07:05:21+02:00 -","","","","",, -10.63.152.202,l-1esx02-b.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 -Fixed version: 3.4.26 - -",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:46+02:00,5c04507c-1d00-4327-b50f-8f7fb23d6de9,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) - associated to a lease when Keys parameter is true, even a user doesn't have read permission to - the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. -Details: -etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... 
-(OID: 1.3.6.1.4.1.25623.1.0.149673) -Version used: 2023-10-12T07:05:32+02:00 -","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 -10.63.152.203,l-1esx03-b.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 -Fixed version: 3.4.26 - -",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:49:35+02:00,38a2f8a7-c098-4ac7-af84-85f0fbf206f9,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) - associated to a lease when Keys parameter is true, even a user doesn't have read permission to - the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. -Details: -etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... -(OID: 1.3.6.1.4.1.25623.1.0.149673) -Version used: 2023-10-12T07:05:32+02:00 -","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 -10.63.152.204,l-1esx04-b.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 -Fixed version: 3.4.26 - -",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:10:24+02:00,66cf2b76-6441-4ece-b2af-6761e387df0c,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) - associated to a lease when Keys parameter is true, even a user doesn't have read permission to - the keys. 
The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. -Details: -etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... -(OID: 1.3.6.1.4.1.25623.1.0.149673) -Version used: 2023-10-12T07:05:32+02:00 -","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 -10.63.152.102,l-1esx02.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 -Fixed version: 3.4.26 - -",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:01:32+02:00,ff414d86-3478-415a-a349-bc91910e51b9,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) - associated to a lease when Keys parameter is true, even a user doesn't have read permission to - the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. -Details: -etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... 
-(OID: 1.3.6.1.4.1.25623.1.0.149673) -Version used: 2023-10-12T07:05:32+02:00 -","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 -10.63.152.103,l-1esx03.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 -Fixed version: 3.4.26 - -",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:06:08+02:00,e040081f-bab5-470e-8e50-f7f63a61b1c2,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) - associated to a lease when Keys parameter is true, even a user doesn't have read permission to - the keys. The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. -Details: -etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... -(OID: 1.3.6.1.4.1.25623.1.0.149673) -Version used: 2023-10-12T07:05:32+02:00 -","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 -10.63.152.104,l-1esx04.bzdtst.XX.de,2379,tcp,4.3,Medium,80,"VendorFix","etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g-rcw5-8298)","etcd is prone to an information disclosure vulnerability.","Installed version: 3.4.25 -Fixed version: 3.4.26 - -",1.3.6.1.4.1.25623.1.0.149673,"CVE-2023-32082",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:59:13+02:00,1188c31e-8e34-4520-b4cb-cc1b5ef62e21,"","Update to version 3.4.26, 3.5.9 or later.","etcd ABCor to version 3.4.26 and version 3.5.x through 3.5.8.","LeaseTimeToLive API allows access to key names (not value) - associated to a lease when Keys parameter is true, even a user doesn't have read permission to - the keys. 
The impact is limited to a cluster which enables auth (RBAC).","Checks if a vulnerable version is present on the target host. -Details: -etcd < 3.4.26, 3.5.x < 3.5.9 Information Disclosure Vulnerability (GHSA-3p4g... -(OID: 1.3.6.1.4.1.25623.1.0.149673) -Version used: 2023-10-12T07:05:32+02:00 -","","","DFN-CERT-2023-1298,WID-SEC-2023-1373","",0.00293,0.49789 -10.63.152.100,l-1vcsa01.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 1497783025 -Packet 2: 1497784105 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:46:45+02:00,2eb74ccf-bfb8-4e9a-8df3-67b70bd52ba1,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.102,l-1esx02.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 2895610065 -Packet 2: 405332881 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:01:32+02:00,54dc8d63-eb80-4196-89b4-fa9d6387ee2c,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.103,l-1esx03.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 2934590030 -Packet 2: 1131840991 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:06:08+02:00,b1581415-59e2-4847-b867-39d1878c233e,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.202,l-1esx02-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 1740125501 -Packet 2: 2510433761 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:46+02:00,73fa94c7-5b05-4196-8c7f-ade387379867,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.203,l-1esx03-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 54910807 -Packet 2: 820590328 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:49:35+02:00,a6734c0d-1fd1-49ce-8b00-0271a7b8cb62,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.104,l-1esx04.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 2684704561 -Packet 2: 2831912580 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:59:13+02:00,590XX466-856e-43b7-931e-05648f01942d,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.204,l-1esx04-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 2272643113 -Packet 2: 1342986068 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:10:24+02:00,70e1a404-4ed0-4823-8d8e-d277560123e7,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.201,l-1esx01-b.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 4279311885 -Packet 2: 120745172 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:43+02:00,24aa905b-b8d4-4a49-9b1d-7d6797c4cee2,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.101,l-1esx01.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 3630835335 -Packet 2: 2057814760 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T04:02:37+02:00,2b9ffca6-7134-467e-8c95-a385b8cfd20e,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.21,l-1cmXX01.bzdtst.XX.de,,,2.6,Low,80,"Mitigation","TCP Timestamps Information Disclosure","The remote host implements TCP timestamps and therefore allows - to compute the uptime.","It was detected that the host implements RFC1323/RFC7323. - -The following timestamps were retrieved with a delay of 1 seconds in-between: -Packet 1: 762318062 -Packet 2: 762319141 -",1.3.6.1.4.1.25623.1.0.80091,"",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:08:49+02:00,423b0e90-7407-4d6e-b0fc-3a4039635000,"A side effect of this feature is that the uptime of the remote - host can sometimes be computed.","To disable TCP timestamps on linux add the line - 'net.ipv4.tcp_timestamps = 0' to /etc/sysctl.conf. Execute 'sysctl -p' to apply the settings at - runtime. - - To disable TCP timestamps on Windows execute 'netsh int tcp set global timestamps=disabled' - - Starting with Windows Server 2008 and Vista, the timestamp can not be completely disabled. - - The default behavior of the TCP/IP stack on this Systems is to not use the Timestamp options when - initiating TCP connections, but use them if the TCP peer that is initiating communication includes - them in their synchronize (SYN) segment. - - See the references for more information.","TCP implementations that implement RFC1323/RFC7323.","The remote host implements TCP timestamps, as defined by - RFC1323/RFC7323.","Special IP packets are forged and sent with a little delay in - between to the target IP. The responses are searched for a timestamps. If found, the timestamps - are reported. 
-Details: -TCP Timestamps Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.80091) -Version used: 2023-12-15T17:10:08+02:00 -","","","","",, -10.63.152.16,l-1ansr01.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: -- ICMP Type: 14 -- ICMP Code: 0 -",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:37+02:00,c670f102-1614-40fd-b1e1-9df7751a98d8,"This information could theoretically be used to exploit weak - time-based random number generators in other services.","Various mitigations are possible: - - - Disable the support for ICMP timestamp on the remote host completely - - - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in - either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a - Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as - well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a - Timestamp Reply (Type 14) is received. 
-Details: -ICMP Timestamp Reply Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.103190) -Version used: 2025-01-21T06:37:33+02:00 -","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 -10.63.152.199,l-1moni04.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: -- ICMP Type: 14 -- ICMP Code: 0 -",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:06:22+02:00,7fd03e88-91c5-493f-a5dd-21f1b424c309,"This information could theoretically be used to exploit weak - time-based random number generators in other services.","Various mitigations are possible: - - - Disable the support for ICMP timestamp on the remote host completely - - - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in - either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a - Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as - well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a - Timestamp Reply (Type 14) is received. 
-Details: -ICMP Timestamp Reply Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.103190) -Version used: 2025-01-21T06:37:33+02:00 -","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 -10.63.152.41,l-1bcksrv1.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: -- ICMP Type: 14 -- ICMP Code: 0 -",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T02:02:40+02:00,23e24245-749e-4c1d-a7c9-1eb14XXbb550,"This information could theoretically be used to exploit weak - time-based random number generators in other services.","Various mitigations are possible: - - - Disable the support for ICMP timestamp on the remote host completely - - - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in - either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a - Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as - well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a - Timestamp Reply (Type 14) is received. 
-Details: -ICMP Timestamp Reply Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.103190) -Version used: 2025-01-21T06:37:33+02:00 -","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 -10.63.152.9,l-1ssms01.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: -- ICMP Type: 14 -- ICMP Code: 0 -",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T03:44:56+02:00,76cb06b0-29a2-493a-a895-add4b3bbd5bb,"This information could theoretically be used to exploit weak - time-based random number generators in other services.","Various mitigations are possible: - - - Disable the support for ICMP timestamp on the remote host completely - - - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in - either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a - Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as - well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a - Timestamp Reply (Type 14) is received. 
-Details: -ICMP Timestamp Reply Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.103190) -Version used: 2025-01-21T06:37:33+02:00 -","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 -10.63.152.21,l-1cmXX01.bzdtst.XX.de,,,2.1,Low,80,"Mitigation","ICMP Timestamp Reply Information Disclosure","The remote host responded to an ICMP timestamp request.","The following response / ICMP packet has been received: -- ICMP Type: 14 -- ICMP Code: 0 -",1.3.6.1.4.1.25623.1.0.103190,"CVE-1999-0524",821703bb-9f4d-4a98-ab67-1849b918166c,"ABC-l-1_ABC-Server",2025-04-13T05:08:49+02:00,6e8e6ab2-5ae1-44e8-b6bf-f23a50674671,"This information could theoretically be used to exploit weak - time-based random number generators in other services.","Various mitigations are possible: - - - Disable the support for ICMP timestamp on the remote host completely - - - Protect the remote host by a firewall, and block ICMP packets passing through the firewall in - either direction (either completely or only for untrusted networks)","","The Timestamp Reply is an ICMP message which replies to a - Timestamp message. It consists of the originating timestamp sent by the sender of the Timestamp as - well as a receive timestamp and a transmit timestamp.","Sends an ICMP Timestamp (Type 13) request and checks if a - Timestamp Reply (Type 14) is received. 
-Details: -ICMP Timestamp Reply Information Disclosure -(OID: 1.3.6.1.4.1.25623.1.0.103190) -Version used: 2025-01-21T06:37:33+02:00 -","","","DFN-CERT-2014-0658,CB-K15/1514,CB-K14/0632","",0.00460,0.61398 diff --git a/unittests/tools/test_openvas_parser_v2.py b/unittests/tools/test_openvas_parser_v2.py index 8378d7fb1e9..1a549b41d26 100644 --- a/unittests/tools/test_openvas_parser_v2.py +++ b/unittests/tools/test_openvas_parser_v2.py @@ -1,5 +1,3 @@ -import io - from dojo.models import Engagement, Product, Test from dojo.tools.openvas_v2.parser import OpenVASV2Parser from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path From 204ffdcb34e218c9d4a9aec96b83e04d2c75302b Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Sat, 30 Aug 2025 17:27:47 +0200 Subject: [PATCH 13/15] fixed lint exception --- dojo/tools/openvas_v2/csv_parser.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas_v2/csv_parser.py index aed2cf4c1f4..599a281bb33 100644 --- a/dojo/tools/openvas_v2/csv_parser.py +++ b/dojo/tools/openvas_v2/csv_parser.py @@ -33,7 +33,8 @@ def get_findings(self, filename, test): column_names = [column_name.lower() for column_name in next(csv_reader) if column_name] if "nvt name" not in column_names: - raise Exception("Not a valid Greenbone/ OpenVAS csv file.") + msg = "Invalid OpenVAS csv file" + raise ValueError(msg) for row in csv_reader: finding = Finding(test=test, dynamic_finding=True, static_finding=False, severity="Info") From 06963858556df9f36794cfb276ffbb4a5caa037f Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Tue, 16 Sep 2025 17:43:19 +0200 Subject: [PATCH 14/15] fix tests --- .../parsers/file/openvas.md | 6 + .../parsers/file/openvas_v2.md | 16 --- dojo/settings/settings.dist.py | 2 +- dojo/tools/factory.py | 7 +- dojo/tools/openvas/parser.py | 24 +++- 
 dojo/tools/openvas/parser_v1/__init__.py | 0 .../openvas/{ => parser_v1}/csv_parser.py | 0 .../openvas/{ => parser_v1}/xml_parser.py | 0 dojo/tools/openvas/parser_v2/__init__.py | 0 .../parser_v2}/common.py | 25 +--- .../parser_v2}/csv_parser.py | 2 +- .../parser_v2}/xml_parser.py | 2 +- dojo/tools/openvas_v2/__init__.py | 1 - dojo/tools/openvas_v2/parser.py | 20 --- unittests/tools/test_openvas_parser.py | 114 +++++++++++++++++- unittests/tools/test_openvas_parser_v2.py | 108 ----------------- 16 files changed, 153 insertions(+), 174 deletions(-) delete mode 100644 docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md create mode 100644 dojo/tools/openvas/parser_v1/__init__.py rename dojo/tools/openvas/{ => parser_v1}/csv_parser.py (100%) rename dojo/tools/openvas/{ => parser_v1}/xml_parser.py (100%) create mode 100644 dojo/tools/openvas/parser_v2/__init__.py rename dojo/tools/{openvas_v2 => openvas/parser_v2}/common.py (80%) rename dojo/tools/{openvas_v2 => openvas/parser_v2}/csv_parser.py (98%) rename dojo/tools/{openvas_v2 => openvas/parser_v2}/xml_parser.py (98%) delete mode 100644 dojo/tools/openvas_v2/__init__.py delete mode 100644 dojo/tools/openvas_v2/parser.py delete mode 100644 unittests/tools/test_openvas_parser_v2.py diff --git a/docs/content/en/connecting_your_tools/parsers/file/openvas.md b/docs/content/en/connecting_your_tools/parsers/file/openvas.md index b0153900161..b7165ad7384 100644 --- a/docs/content/en/connecting_your_tools/parsers/file/openvas.md +++ b/docs/content/en/connecting_your_tools/parsers/file/openvas.md @@ -15,3 +15,9 @@ By default, DefectDojo identifies duplicate Findings using these [hashcode field - line - file path - description + +### Parser V2 Changes +Version 2 comes with multiple improvements: +- Increased parsing consistency between the XML and CSV parser +- Combined findings where the only differences are in fields that can’t be rehashed due to inconsistent values between scans, e.g. fields with timestamps or 
packet ids. +- Parser now combines multiple identical findings with different endpoints into one finding with multiple endpoints (instead of multiple findings with one endpoint each) diff --git a/docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md b/docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md deleted file mode 100644 index a39bd735fc1..00000000000 --- a/docs/content/en/connecting_your_tools/parsers/file/openvas_v2.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: "OpenVAS Parser V2" -toc_hide: true ---- -This is version 2 of the OpenVAS / Greenbone parser. -You can upload your scanns in eighter csv or xml format. For the parser to recognize the difference they have to end with .csv or .xml. - -### V2 Changes -Version 2 comes with multiple improvments TODO: -- Using using unique_id_from_tool for deduplication -- Increased parsing Consistensy between the xml and csv parser -- Combined findings where the only differences are in fields that can’t be rehashed due to inconsistent values between scans e.g fields with timestamps or packet ids. -- Parser now combines multiple identical findings with different endpoints into one findings with multiple endpoints (instead of multiple findings with one endpoint each) - -### Sample Scan Data -Sample OpenVAS scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/openvas).
diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 4e52b029c43..67a7cc38f74 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1356,7 +1356,7 @@ def saml2_attrib_map_format(din): "Qualys Hacker Guardian Scan": ["title", "severity", "description"], "Cyberwatch scan (Galeax)": ["title", "description", "severity"], "Cycognito Scan": ["title", "severity"], - "OpenVAS Parser v2": ["title", "unique_id_from_tool", "vuln_id_from_tool"], + "OpenVAS Parser v2": ["title", "severity", "vuln_id_from_tool"], } # Override the hardcoded settings here via the env var diff --git a/dojo/tools/factory.py b/dojo/tools/factory.py index f5c100266a1..2f54e9d7592 100644 --- a/dojo/tools/factory.py +++ b/dojo/tools/factory.py @@ -119,7 +119,12 @@ def requires_tool_type(scan_type): module = import_module(f"dojo.tools.{module_name}.parser") for attribute_name in dir(module): attribute = getattr(module, attribute_name) - if isclass(attribute) and attribute_name.lower() == module_name.replace("_", "") + "parser": + # Allow parser class names with optional v[number] suffix (e.g., OpenVASParser, OpenVASParserV2) + expected_base = module_name.replace("_", "") + "parser" + if isclass(attribute) and ( + attribute_name.lower() == expected_base or + re.match(rf"^{re.escape(expected_base)}v\d+$", attribute_name.lower()) + ): register(attribute) except: logger.exception("failed to load %s", module_name) diff --git a/dojo/tools/openvas/parser.py b/dojo/tools/openvas/parser.py index 9f366c17694..ebf1b488213 100644 --- a/dojo/tools/openvas/parser.py +++ b/dojo/tools/openvas/parser.py @@ -1,5 +1,7 @@ -from dojo.tools.openvas.csv_parser import OpenVASCSVParser -from dojo.tools.openvas.xml_parser import OpenVASXMLParser +from dojo.tools.openvas.parser_v1.csv_parser import OpenVASCSVParser +from dojo.tools.openvas.parser_v1.xml_parser import OpenVASXMLParser +from dojo.tools.openvas.parser_v2.csv_parser import OpenVASCSVParserV2 +from 
dojo.tools.openvas.parser_v2.xml_parser import OpenVASXMLParserV2 class OpenVASParser: @@ -18,3 +20,21 @@ def get_findings(self, filename, test): if str(filename.name).endswith(".xml"): return OpenVASXMLParser().get_findings(filename, test) return None + + +class OpenVASParserV2: + def get_scan_types(self): + return ["OpenVAS Parser v2"] + + def get_label_for_scan_types(self, scan_type): + return scan_type + + def get_description_for_scan_types(self, scan_type): + return "Import CSV or XML output of Greenbone OpenVAS report." + + def get_findings(self, filename, test): + if str(filename.name).endswith(".csv"): + return OpenVASCSVParserV2().get_findings(filename, test) + if str(filename.name).endswith(".xml"): + return OpenVASXMLParserV2().get_findings(filename, test) + return None diff --git a/dojo/tools/openvas/parser_v1/__init__.py b/dojo/tools/openvas/parser_v1/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/dojo/tools/openvas/csv_parser.py b/dojo/tools/openvas/parser_v1/csv_parser.py similarity index 100% rename from dojo/tools/openvas/csv_parser.py rename to dojo/tools/openvas/parser_v1/csv_parser.py diff --git a/dojo/tools/openvas/xml_parser.py b/dojo/tools/openvas/parser_v1/xml_parser.py similarity index 100% rename from dojo/tools/openvas/xml_parser.py rename to dojo/tools/openvas/parser_v1/xml_parser.py diff --git a/dojo/tools/openvas/parser_v2/__init__.py b/dojo/tools/openvas/parser_v2/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/dojo/tools/openvas_v2/common.py b/dojo/tools/openvas/parser_v2/common.py similarity index 80% rename from dojo/tools/openvas_v2/common.py rename to dojo/tools/openvas/parser_v2/common.py index 80c7ed00b58..3325d95969d 100644 --- a/dojo/tools/openvas_v2/common.py +++ b/dojo/tools/openvas/parser_v2/common.py @@ -36,8 +36,6 @@ def update_finding(finding: Finding, aux_info: OpenVASFindingAuxData): def deduplicate(dupes: dict[str, Finding], finding: Finding): """Combine 
multiple openvas findings into one defectdojo finding with multiple endpoints""" finding_hash = dedup_finding_hash(finding) - # deliberately missuse unique_id_from_tool to save some original values - finding.unique_id_from_tool = id_from_tool_finding_hash(finding) if finding_hash not in dupes: dupes[finding_hash] = finding @@ -70,28 +68,13 @@ def deduplicate(dupes: dict[str, Finding], finding: Finding): def id_from_tool_finding_hash(finding: Finding): """Generate a hash that complements final hash generating outside of this parser""" endpoint = finding.unsaved_endpoints[0] - hash_data = [ - str(endpoint.protocol), - str(endpoint.userinfo), - str(endpoint.port), # keep findings on different port seperate as it may be different applications - str(endpoint.path), - str(endpoint.fragment), - finding.severity, # allows changing severity of finding after import - ] - return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest() + if "endpoints" in HASHCODE_FIELDS_PER_SCANNER["OpenVAS Parser v2"]: + pass -def dedup_finding_hash(finding: Finding): - """Generate a hash for a finding that is used for deduplication of findings inside the current report""" - endpoint = finding.unsaved_endpoints[0] hash_data = [ str(endpoint.protocol), - str(endpoint.userinfo), - str(endpoint.port), - str(endpoint.path), - str(endpoint.fragment), - finding.title, - finding.vuln_id_from_tool, - finding.severity, + str(endpoint.port), # keep findings on different port seperate as it may be different applications + finding.severity, # allows changing severity of finding after import ] return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest() diff --git a/dojo/tools/openvas_v2/csv_parser.py b/dojo/tools/openvas/parser_v2/csv_parser.py similarity index 98% rename from dojo/tools/openvas_v2/csv_parser.py rename to dojo/tools/openvas/parser_v2/csv_parser.py index 599a281bb33..db21819f398 100644 --- a/dojo/tools/openvas_v2/csv_parser.py +++ 
b/dojo/tools/openvas/parser_v2/csv_parser.py @@ -4,7 +4,7 @@ from dateutil.parser import parse from dojo.models import Endpoint, Finding -from dojo.tools.openvas_v2.common import ( +from dojo.tools.openvas.parser_v2.common import ( OpenVASFindingAuxData, cleanup_openvas_text, deduplicate, diff --git a/dojo/tools/openvas_v2/xml_parser.py b/dojo/tools/openvas/parser_v2/xml_parser.py similarity index 98% rename from dojo/tools/openvas_v2/xml_parser.py rename to dojo/tools/openvas/parser_v2/xml_parser.py index 6ff3ab48ced..7c410452c9e 100644 --- a/dojo/tools/openvas_v2/xml_parser.py +++ b/dojo/tools/openvas/parser_v2/xml_parser.py @@ -4,7 +4,7 @@ from defusedxml import ElementTree from dojo.models import Endpoint, Finding -from dojo.tools.openvas_v2.common import ( +from dojo.tools.openvas.parser_v2.common import ( OpenVASFindingAuxData, cleanup_openvas_text, deduplicate, diff --git a/dojo/tools/openvas_v2/__init__.py b/dojo/tools/openvas_v2/__init__.py deleted file mode 100644 index 3ad798a42b3..00000000000 --- a/dojo/tools/openvas_v2/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = "manuel-sommer" diff --git a/dojo/tools/openvas_v2/parser.py b/dojo/tools/openvas_v2/parser.py deleted file mode 100644 index 0d7a815bd61..00000000000 --- a/dojo/tools/openvas_v2/parser.py +++ /dev/null @@ -1,20 +0,0 @@ -from dojo.tools.openvas_v2.csv_parser import OpenVASCSVParserV2 -from dojo.tools.openvas_v2.xml_parser import OpenVASXMLParserV2 - - -class OpenVASV2Parser: - def get_scan_types(self): - return ["OpenVAS Parser v2"] - - def get_label_for_scan_types(self, scan_type): - return scan_type - - def get_description_for_scan_types(self, scan_type): - return "Import CSV or XML output of Greenbone OpenVAS report." 
- - def get_findings(self, filename, test): - if str(filename.name).endswith(".csv"): - return OpenVASCSVParserV2().get_findings(filename, test) - if str(filename.name).endswith(".xml"): - return OpenVASXMLParserV2().get_findings(filename, test) - return None diff --git a/unittests/tools/test_openvas_parser.py b/unittests/tools/test_openvas_parser.py index 7ec8cf7ebf2..b39505c15a9 100644 --- a/unittests/tools/test_openvas_parser.py +++ b/unittests/tools/test_openvas_parser.py @@ -1,8 +1,115 @@ from dojo.models import Engagement, Product, Test -from dojo.tools.openvas.parser import OpenVASParser +from dojo.tools.openvas.parser import OpenVASParser, OpenVASParserV2 from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path +# V2 Parser tests +def openvas_open(file): + """Helper to get file handle to openvas test files""" + return (get_unit_tests_scans_path("openvas") / file).open(encoding="utf-8") + + +def setup_openvas_v2_test(f): + """Setup helper for general openvas_v2 test setup""" + test = Test() + test.engagement = Engagement() + test.engagement.product = Product() + parser = OpenVASParserV2() + findings = parser.get_findings(f, test) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + return findings + + +class TestOpenVASParserV2(DojoTestCase): + # test empty cases + def test_openvas_csv_no_vuln(self): + """Ensure that an empty report does not throw and error and reports 0 findings""" + with openvas_open("no_vuln.csv") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(0, len(findings)) + + def test_openvas_xml_no_vuln(self): + """Ensure that an empty report does not throw and error and reports 0 findings""" + with openvas_open("no_vuln.xml") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(0, len(findings)) + + def test_openvas_parser_csv_detail(self): + """Ensure finding contains report data as expected""" + with openvas_open("report_detail_v2.csv") as f: + findings = 
setup_openvas_v2_test(f) + + # ensure single finding + self.assertEqual(len(findings), 1) + finding = findings[0] + + # general finding info tests + self.assertEqual("Microsoft Windows Multiple Vulnerabilities (KB5062557)", finding.title) + self.assertEqual("High", finding.severity) # OpenVAS report Critical findings as High + self.assertEqual(9.8, finding.cvssv3_score) + + # vulnerability id tests + self.assertEqual(finding.vuln_id_from_tool, "1.3.6.1.4.1.25623.1.0.836484") + self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2025-48823") + self.assertEqual(93, len(finding.unsaved_vulnerability_ids)) + + # endpoint tests + self.assertEqual(1, len(finding.unsaved_endpoints)) + self.assertEqual("server99", finding.unsaved_endpoints[0].host) + # this is example data normaly tested finding does not include this + self.assertEqual(42, finding.unsaved_endpoints[0].port) + self.assertEqual("tcp", finding.unsaved_endpoints[0].protocol) + + def test_openvas_parser_csv_xml_parity(self): + """Ensure xml and csv parser parse data that is the same between report in the same way""" + with openvas_open("report_detail_v2.csv") as f: + findings_csv = setup_openvas_v2_test(f) + with openvas_open("report_detail_v2.xml") as f: + findings_xml = setup_openvas_v2_test(f) + + f_xml = findings_xml[0] + f_csv = findings_csv[0] + + # ensure same general finding parsing behaviour + self.assertEqual(f_xml.title, f_csv.title) + self.assertEqual(f_xml.severity, f_csv.severity) + self.assertEqual(f_xml.cvssv3_score, f_csv.cvssv3_score) + # remove this if future parser versions want different description behaviour + self.assertEqual(f_xml.description, f_csv.description) + + # ensure same vulnerability id parsing behaviour + self.assertEqual(f_xml.vuln_id_from_tool, f_csv.vuln_id_from_tool) + # xml has multiple types of vulnerability ids, change this if a new one is parsed + self.assertEqual(len(f_xml.unsaved_vulnerability_ids), len(f_csv.unsaved_vulnerability_ids)) + 
self.assertEqual(f_xml.unsaved_vulnerability_ids, f_csv.unsaved_vulnerability_ids) + + # ensure same endpoint parsing behaviour + self.assertEqual(f_xml.unsaved_endpoints[0].host, f_csv.unsaved_endpoints[0].host) + self.assertEqual(f_xml.unsaved_endpoints[0].protocol, f_csv.unsaved_endpoints[0].protocol) + self.assertEqual(f_xml.unsaved_endpoints[0].port, f_csv.unsaved_endpoints[0].port) + + def test_openvas_csv_report_combined_findings(self): + """Ensure findings combinding behaviour""" + with openvas_open("report_combine_v2.csv") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(1, len(findings)) + + def test_openvas_csv_many_findings(self): + """Ensure findings combinding behaviour""" + with openvas_open("many_vuln.csv") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(4, len(findings)) + + def test_openvas_xml_many_findings(self): + """Ensure findings combinding behaviour""" + with openvas_open("many_vuln.xml") as f: + findings = setup_openvas_v2_test(f) + self.assertEqual(44, len(findings)) + + +# V1 Parser tests class TestOpenVASParser(DojoTestCase): def test_openvas_csv_one_vuln(self): with (get_unit_tests_scans_path("openvas") / "one_vuln.csv").open(encoding="utf-8") as f: @@ -105,7 +212,10 @@ def test_openvas_xml_one_vuln(self): self.assertEqual(1, len(findings)) with self.subTest(i=0): finding = findings[0] - self.assertEqual("Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows_10.0.101.2_general/tcp", finding.title) + self.assertEqual( + "Mozilla Firefox Security Update (mfsa_2023-32_2023-36) - Windows_10.0.101.2_general/tcp", + finding.title, + ) self.assertEqual("High", finding.severity) def test_openvas_xml_many_vuln(self): diff --git a/unittests/tools/test_openvas_parser_v2.py b/unittests/tools/test_openvas_parser_v2.py deleted file mode 100644 index 1a549b41d26..00000000000 --- a/unittests/tools/test_openvas_parser_v2.py +++ /dev/null @@ -1,108 +0,0 @@ -from dojo.models import Engagement, Product, Test -from 
dojo.tools.openvas_v2.parser import OpenVASV2Parser -from unittests.dojo_test_case import DojoTestCase, get_unit_tests_scans_path - - -def openvas_open(file): - """Helper to get file handle to openvas test files""" - return (get_unit_tests_scans_path("openvas") / file).open(encoding="utf-8") - - -def setup_openvas_v2_test(f): - """Setup helper for general openvas_v2 test setup""" - test = Test() - test.engagement = Engagement() - test.engagement.product = Product() - parser = OpenVASV2Parser() - findings = parser.get_findings(f, test) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - return findings - - -class TestOpenVASParserV2(DojoTestCase): - # test empty cases - def test_openvas_csv_no_vuln(self): - """Ensure that an empty report does not throw and error and reports 0 findings""" - with openvas_open("no_vuln.csv") as f: - findings = setup_openvas_v2_test(f) - self.assertEqual(0, len(findings)) - - def test_openvas_xml_no_vuln(self): - """Ensure that an empty report does not throw and error and reports 0 findings""" - with openvas_open("no_vuln.xml") as f: - findings = setup_openvas_v2_test(f) - self.assertEqual(0, len(findings)) - - def test_openvas_parser_csv_detail(self): - """Ensure finding contains report data as expected""" - with openvas_open("report_detail_v2.csv") as f: - findings = setup_openvas_v2_test(f) - - # ensure single finding - self.assertEqual(len(findings), 1) - finding = findings[0] - - # general finding info tests - self.assertEqual("Microsoft Windows Multiple Vulnerabilities (KB5062557)", finding.title) - self.assertEqual("High", finding.severity) # OpenVAS report Critical findings as High - self.assertEqual(9.8, finding.cvssv3_score) - - # vulnerability id tests - self.assertEqual(finding.vuln_id_from_tool, "1.3.6.1.4.1.25623.1.0.836484") - self.assertEqual(finding.unsaved_vulnerability_ids[1], "CVE-2025-48823") - self.assertEqual(93, len(finding.unsaved_vulnerability_ids)) - - # endpoint tests 
- self.assertEqual(1, len(finding.unsaved_endpoints)) - self.assertEqual("server99", finding.unsaved_endpoints[0].host) - # this is example data normaly tested finding does not include this - self.assertEqual(42, finding.unsaved_endpoints[0].port) - self.assertEqual("tcp", finding.unsaved_endpoints[0].protocol) - - def test_openvas_parser_csv_xml_parity(self): - """Ensure xml and csv parser parse data that is the same between report in the same way""" - with openvas_open("report_detail_v2.csv") as f: - findings_csv = setup_openvas_v2_test(f) - with openvas_open("report_detail_v2.xml") as f: - findings_xml = setup_openvas_v2_test(f) - - f_xml = findings_xml[0] - f_csv = findings_csv[0] - - # ensure same general finding parsing behaviour - self.assertEqual(f_xml.title, f_csv.title) - self.assertEqual(f_xml.severity, f_csv.severity) - self.assertEqual(f_xml.cvssv3_score, f_csv.cvssv3_score) - # remove this if future parser versions want different description behaviour - self.assertEqual(f_xml.description, f_csv.description) - - # ensure same vulnerability id parsing behaviour - self.assertEqual(f_xml.vuln_id_from_tool, f_csv.vuln_id_from_tool) - # xml has multiple types of vulnerability ids, change this if a new one is parsed - self.assertEqual(len(f_xml.unsaved_vulnerability_ids), len(f_csv.unsaved_vulnerability_ids)) - self.assertEqual(f_xml.unsaved_vulnerability_ids, f_csv.unsaved_vulnerability_ids) - - # ensure same endpoint parsing behaviour - self.assertEqual(f_xml.unsaved_endpoints[0].host, f_csv.unsaved_endpoints[0].host) - self.assertEqual(f_xml.unsaved_endpoints[0].protocol, f_csv.unsaved_endpoints[0].protocol) - self.assertEqual(f_xml.unsaved_endpoints[0].port, f_csv.unsaved_endpoints[0].port) - - def test_openvas_csv_report_combined_findings(self): - """Ensure findings combinding behaviour""" - with openvas_open("report_combine_v2.csv") as f: - findings = setup_openvas_v2_test(f) - self.assertEqual(1, len(findings)) - - def 
test_openvas_csv_many_findings(self): - """Ensure findings combinding behaviour""" - with openvas_open("many_vuln.csv") as f: - findings = setup_openvas_v2_test(f) - self.assertEqual(4, len(findings)) - - def test_openvas_xml_many_findings(self): - """Ensure findings combinding behaviour""" - with openvas_open("many_vuln.xml") as f: - findings = setup_openvas_v2_test(f) - self.assertEqual(44, len(findings)) From ec556387ec1b79eda19e42a91cb63c6e037cc7a2 Mon Sep 17 00:00:00 2001 From: jostaub <67969701+jostaub@users.noreply.github.com> Date: Tue, 16 Sep 2025 18:13:12 +0200 Subject: [PATCH 15/15] fixed lint and bugs --- dojo/tools/factory.py | 2 +- dojo/tools/openvas/parser_v1/csv_parser.py | 3 ++- dojo/tools/openvas/parser_v2/common.py | 17 +++++++++-------- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/dojo/tools/factory.py b/dojo/tools/factory.py index 2f54e9d7592..a536607f640 100644 --- a/dojo/tools/factory.py +++ b/dojo/tools/factory.py @@ -122,7 +122,7 @@ def requires_tool_type(scan_type): # Allow parser class names with optional v[number] suffix (e.g., OpenVASParser, OpenVASParserV2) expected_base = module_name.replace("_", "") + "parser" if isclass(attribute) and ( - attribute_name.lower() == expected_base or + attribute_name.lower() == expected_base or re.match(rf"^{re.escape(expected_base)}v\d+$", attribute_name.lower()) ): register(attribute) diff --git a/dojo/tools/openvas/parser_v1/csv_parser.py b/dojo/tools/openvas/parser_v1/csv_parser.py index 19fd92b33c4..1a87d17b73e 100644 --- a/dojo/tools/openvas/parser_v1/csv_parser.py +++ b/dojo/tools/openvas/parser_v1/csv_parser.py @@ -79,7 +79,7 @@ def __init__(self): super().__init__() def map_column_value(self, finding, column_value): - if column_value != "": + if column_value: if "," in column_value: finding.description += "\n**All CVEs:** " + str(column_value) for value in column_value.split(","): @@ -234,6 +234,7 @@ def __init__(self): def map_column_value(self, finding, column_value): 
finding.duplicate = self.evaluate_bool_value(column_value) + class OpenVASCSVParser: def create_chain(self): date_column_strategy = DateColumnMappingStrategy() diff --git a/dojo/tools/openvas/parser_v2/common.py b/dojo/tools/openvas/parser_v2/common.py index 3325d95969d..cd69640ee6c 100644 --- a/dojo/tools/openvas/parser_v2/common.py +++ b/dojo/tools/openvas/parser_v2/common.py @@ -65,16 +65,17 @@ def deduplicate(dupes: dict[str, Finding], finding: Finding): org.unsaved_endpoints += finding.unsaved_endpoints -def id_from_tool_finding_hash(finding: Finding): - """Generate a hash that complements final hash generating outside of this parser""" +def dedup_finding_hash(finding: Finding): + """Generate a hash for a finding that is used for deduplication of findings inside the current report""" endpoint = finding.unsaved_endpoints[0] - - if "endpoints" in HASHCODE_FIELDS_PER_SCANNER["OpenVAS Parser v2"]: - pass - hash_data = [ str(endpoint.protocol), - str(endpoint.port), # keep findings on different port seperate as it may be different applications - finding.severity, # allows changing severity of finding after import + str(endpoint.userinfo), + str(endpoint.port), + str(endpoint.path), + str(endpoint.fragment), + finding.title, + finding.vuln_id_from_tool, + finding.severity, ] return hashlib.sha256("|".join(hash_data).encode("utf-8")).hexdigest()