diff --git a/docs/content/en/about_defectdojo/about_docs.md b/docs/content/en/about_defectdojo/about_docs.md
index e098872c901..49640238f31 100644
--- a/docs/content/en/about_defectdojo/about_docs.md
+++ b/docs/content/en/about_defectdojo/about_docs.md
@@ -67,6 +67,8 @@ Other guides for working with an Open-Source install:
 
 If you run into trouble with an Open Source install, we highly recommend asking questions on the [OWASP Slack](https://owasp.org/slack/invite). Our community members are active on the **# defectdojo** channel and can help you with issues you’re facing.
 
+Looking for cool DefectDojo laptop stickers? As a thank-you for being part of the DefectDojo community, you can sign up to receive free DefectDojo stickers. For more information, check out [this link](https://defectdojo.com/defectdojo-sticker-request).
+
 ### Online Demo
 
 A running example of DefectDojo (Open-Source Edition) is available on [our demo server](https://demo.defectdojo.org), using the credentials `admin` / `1Defectdojo@demo#appsec`. The demo server is refreshed regularly and provisioned with some sample data.
diff --git a/docs/content/en/changelog/changelog.md b/docs/content/en/changelog/changelog.md
index 712dcfa2cb4..2cc71a0a6f7 100644
--- a/docs/content/en/changelog/changelog.md
+++ b/docs/content/en/changelog/changelog.md
@@ -10,6 +10,11 @@ For Open Source release notes, please see the [Releases page on GitHub](https://
 
 ## Sept 2025: v2.50
 
+### Sept 15, 2025: v2.50.2
+
+* **(Pro UI)** Added Any/All status filtering. Status filters can now apply either AND logic (a finding must match all selected statuses) or OR logic (a finding may match any selected status).
+* **(Pro UI)** Added Contact Support form for On-Premise installs.
+
 ### Sept 9, 2025: v2.50.1
 
 * **(Tools)** Removed CSV limit for Qualys HackerGuardian
diff --git a/docs/content/en/connecting_your_tools/parsers/file/generic.md b/docs/content/en/connecting_your_tools/parsers/file/generic.md
index f9c9943378e..6905c0ceed1 100644
--- a/docs/content/en/connecting_your_tools/parsers/file/generic.md
+++ b/docs/content/en/connecting_your_tools/parsers/file/generic.md
@@ -3,9 +3,9 @@ title: 'Generic Findings Import'
 toc_hide: true
 ---
 
-Import Generic findings in CSV or JSON format.
+Generic Findings Import can be used to import any report in CSV or JSON format.
 
-Attributes supported for CSV:
+### Supported Attributes (CSV)
 
 - Date: Date of the finding in mm/dd/yyyy format.
 - Title: Title of the finding
@@ -37,6 +37,15 @@ The CSV expects a header row with the names of the attributes.
 
 Date fields are parsed using [dateutil.parse](https://dateutil.readthedocs.io/en/stable/parser.html) supporting a variety of formats such as YYYY-MM-DD or ISO-8601.
 
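+For example, a minimal CSV using a subset of these attributes might look like the following (values are illustrative):
+
+```csv
+Date,Title,CweId,Url,Severity,Description,Mitigation,Impact,References,Active,Verified,FalsePositive,Duplicate
+06/15/2025,Example SQL injection,89,https://example.com/search,High,"User input is concatenated into a SQL query.","Use parameterized queries.","Attackers may be able to read or modify data.",https://owasp.org/www-community/attacks/SQL_Injection,TRUE,FALSE,FALSE,FALSE
+```
+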
+### Supported Attributes (JSON)
+
 The list of supported fields in JSON format:
 
 - title: **Required.** String
@@ -93,7 +95,7 @@ The list of supported fields in JSON format:
 - ransomware_used: Bool
 - fix_available: Bool
 
-Example of JSON format:
+### Example JSON
 
 ```JSON
 {
diff --git a/docs/content/en/connecting_your_tools/parsers/generic_findings_import.md b/docs/content/en/connecting_your_tools/parsers/generic_findings_import.md
index a9d40341a30..06c229ef2e1 100644
--- a/docs/content/en/connecting_your_tools/parsers/generic_findings_import.md
+++ b/docs/content/en/connecting_your_tools/parsers/generic_findings_import.md
@@ -1,136 +1,20 @@
 ---
-title: "Generic Findings Import"
+title: "Using Generic Findings Import"
 toc_hide: true
 weight: 2
 ---
 
-You can use Generic Findings Import as a method to ingest JSON or CSV files into DefectDojo which are not already in the supported parsers list.
+Open Source and Pro users can use Generic Findings Import to ingest JSON or CSV reports into DefectDojo from tools that are not already on the supported Tools list.
 
-Files uploaded using Generic Findings Import must conform to the accepted format with respect to CSV column headers / JSON attributes.
+Using Generic Findings Import will create a new Test Type in your DefectDojo instance called "`{The Name Of Your Test}` (Generic Findings Import)". For example, this JSON content will result in a Test Type called "Example Report (Generic Findings Import)":
 
-These attributes are supported for CSV:
-
-- Date: Date of the finding in mm/dd/yyyy format.
-- Title: Title of the finding
-- CweId: Cwe identifier, must be an integer value.
-- epss_score: The probability of exploitation in the next 30 days, must be a float value between 0 and 1.0.
-- epss_percentile: The proportion of all scored vulnerabilities with the same or a lower EPSS score, must be a float value between 0 and 1.0.
-- Url: Url associated with the finding.
-- Severity: Severity of the finding. Must be one of Info, Low, Medium, High, or Critical.
-- Description: Description of the finding. Can be multiple lines if enclosed in double quotes.
-- Mitigation: Possible Mitigations for the finding. Can be multiple lines if enclosed in double quotes.
-- Impact: Detailed impact of the finding. Can be multiple lines if enclosed in double quotes.
-- References: References associated with the finding. Can be multiple lines if enclosed in double quotes.
-- Active: Indicator if the finding is active. Must be empty, TRUE or FALSE
-- Verified: Indicator if the finding has been verified. Must be empty, TRUE, or FALSE
-- FalsePositive: Indicator if the finding is a false positive. Must be TRUE, or FALSE.
-- Duplicate: Indicator if the finding is a duplicate. Must be TRUE, or FALSE
-
-The CSV expects a header row with the names of the attributes. 
- -Example of JSON format: - -```JSON -{ - "findings": [ - { - "title": "test title with endpoints as dict", - "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau", - "severity": "Medium", - "mitigation": "Some mitigation", - "date": "2021-01-06", - "cve": "CVE-2020-36234", - "cwe": 261, - "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N", - "file_path": "src/first.cpp", - "line": 13, - "endpoints": [ - { - "host": "exemple.com" - } - ] - }, - { - "title": "test title with endpoints as strings", - "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2", - "severity": "Critical", - "mitigation": "Some mitigation", - "date": "2021-01-06", - "cve": "CVE-2020-36235", - "cwe": 287, - "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N", - "file_path": "src/two.cpp", - "line": 135, - "endpoints": [ - "http://urlfiltering.paloaltonetworks.com/test-command-and-control", - "https://urlfiltering.paloaltonetworks.com:2345/test-pest" - ] - }, - { - "title": "test title", - "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2", - "severity": "Critical", - "mitigation": "Some mitigation", - "date": "2021-01-06", - "cve": "CVE-2020-36236", - "cwe": 287, - "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N", - "file_path": "src/threeeeeeeeee.cpp", - "line": 1353 - } - ] -} -``` - -This parser supports an attributes that accept files as Base64 strings. These files are attached to the respective findings. - -Example: - -```JSON -{ - "name": "My wonderful report", - "findings": [ - { - "title": "Vuln with image", - "description": "Some very long description", - "severity": "Medium", - "files": [ - { - "title": "Screenshot from 2017-04-10 16-54-19.png", - "data": "iVBORw0KGgoAAAANSUhEUgAABWgAAAK0CAIAAAARSkPJAAAAA3N<...>TkSuQmCC" - } - ] - } - ] -} -``` - -This parser supports some additional attributes to be able to define custom `TestTypes` as well as influencing some meta fields on the `Test`: - -- `name`: The internal name of the tool you are using. This is primarily informational, and used for reading the report manually. -- `type`: The name of the test type to create in DefectDojo with the suffix of `(Generic Findings Import)`. The suffix is an important identifier for future users attempting to identify the test type to supply when importing new reports. This value is very important when fetching the correct test type to import findings into, so be sure to keep the `type` consistent from import to import! As an example, a report submitted with a `type` of `Internal Company Tool` will produce a test type in DefectDojo with the title `Internal Company Tool (Generic Findings Import)`. With this newly created test type, you can define custom `HASHCODE_FIELDS` or `DEDUPLICATION_ALGORITHM` in the settings. -- `version`: The version of the tool you are using. This is primarily informational, and is used for reading the report manually and tracking format changes from version to version. -- `description`: A brief description of the test. This could be an explanation of what the tool is reporting, where the tools is maintained, who the point of contact is for the tool when issues arise, or anything in between. -- `static_tool`: Dictates that tool used is running static analysis methods to discover vulnerabilities. -- `dynamic_tool`: Dictates that tool used is running dynamic analysis methods to discover vulnerabilities. 
-- `soc`: Dictates that tool is used for reporting alerts from a soc (Pro Edition Only).
-
-Example:
-
 ```JSON
 {
-    "name": "My wonderful report",
-    "type": "My custom Test type",
-    "version": "1.0.5",
-    "description": "A unicorn tool that is capable of static analysis, dynamic analysis, and even capturing soc alerts!",
-    "static_tool": true,
-    "dynamic_tool": true,
-    "soc": true,
-    "findings": [
-    ]
+    "name": "Example Report",
+    "findings": []
 }
 ```
 
-### Sample Scan Data
+DefectDojo Pro users can also consider using the [Universal Parser](../universal_parser), a tool which allows for highly customizable JSON, XML, and CSV imports.
 
-Sample Generic Findings Import scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/generic).
\ No newline at end of file
+For more information on supported parameters for Generic Findings Import, see the [Parser Guide](../file/generic).
\ No newline at end of file
truncate_timezone_aware(now() - timedelta(days=30)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), 4: (_("Past 90 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() - timedelta(days=90)), - f"{name}__lt": _truncate(now() + timedelta(days=1)), + f"{name}__gte": truncate_timezone_aware(now() - timedelta(days=90)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), 5: (_("Current month"), lambda qs, name: qs.filter(**{ f"{name}__year": now().year, @@ -673,8 +673,8 @@ class DateRangeFilter(ChoiceFilter): f"{name}__year": now().year, })), 7: (_("Past year"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() - timedelta(days=365)), - f"{name}__lt": _truncate(now() + timedelta(days=1)), + f"{name}__gte": truncate_timezone_aware(now() - timedelta(days=365)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), } @@ -700,43 +700,43 @@ class DateRangeOmniFilter(ChoiceFilter): f"{name}__day": now().day, })), 2: (_("Next 7 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() + timedelta(days=1)), - f"{name}__lt": _truncate(now() + timedelta(days=7)), + f"{name}__gte": truncate_timezone_aware(now() + timedelta(days=1)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=7)), })), 3: (_("Next 30 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() + timedelta(days=1)), - f"{name}__lt": _truncate(now() + timedelta(days=30)), + f"{name}__gte": truncate_timezone_aware(now() + timedelta(days=1)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=30)), })), 4: (_("Next 90 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() + timedelta(days=1)), - f"{name}__lt": _truncate(now() + timedelta(days=90)), + f"{name}__gte": truncate_timezone_aware(now() + timedelta(days=1)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=90)), })), 5: (_("Past 7 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() - timedelta(days=7)), - f"{name}__lt": _truncate(now() + timedelta(days=1)), + f"{name}__gte": truncate_timezone_aware(now() - timedelta(days=7)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), 6: (_("Past 30 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() - timedelta(days=30)), - f"{name}__lt": _truncate(now() + timedelta(days=1)), + f"{name}__gte": truncate_timezone_aware(now() - timedelta(days=30)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), 7: (_("Past 90 days"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() - timedelta(days=90)), - f"{name}__lt": _truncate(now() + timedelta(days=1)), + f"{name}__gte": truncate_timezone_aware(now() - timedelta(days=90)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), 8: (_("Current month"), lambda qs, name: qs.filter(**{ f"{name}__year": now().year, f"{name}__month": now().month, })), 9: (_("Past year"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() - timedelta(days=365)), - f"{name}__lt": _truncate(now() + timedelta(days=1)), + f"{name}__gte": truncate_timezone_aware(now() - timedelta(days=365)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=1)), })), 10: (_("Current year"), lambda qs, name: qs.filter(**{ f"{name}__year": now().year, })), 11: (_("Next year"), lambda qs, name: qs.filter(**{ - f"{name}__gte": _truncate(now() + timedelta(days=1)), - f"{name}__lt": _truncate(now() + 
timedelta(days=365)), + f"{name}__gte": truncate_timezone_aware(now() + timedelta(days=1)), + f"{name}__lt": truncate_timezone_aware(now() + timedelta(days=365)), })), } @@ -818,8 +818,8 @@ def any(self, qs, name): if earliest_finding is not None: start_date = datetime.combine( earliest_finding.date, datetime.min.time()).replace(tzinfo=tzinfo()) - self.start_date = _truncate(start_date - timedelta(days=1)) - self.end_date = _truncate(now() + timedelta(days=1)) + self.start_date = truncate_timezone_aware(start_date - timedelta(days=1)) + self.end_date = truncate_timezone_aware(now() + timedelta(days=1)) return qs.all() return None @@ -839,8 +839,8 @@ def current_year(self, qs, name): }) def past_x_days(self, qs, name, days): - self.start_date = _truncate(now() - timedelta(days=days)) - self.end_date = _truncate(now() + timedelta(days=1)) + self.start_date = truncate_timezone_aware(now() - timedelta(days=days)) + self.end_date = truncate_timezone_aware(now() + timedelta(days=1)) return qs.filter(**{ f"{name}__gte": self.start_date, f"{name}__lt": self.end_date, @@ -884,8 +884,8 @@ def filter(self, qs, value): if earliest_finding is not None: start_date = datetime.combine( earliest_finding.date, datetime.min.time()).replace(tzinfo=tzinfo()) - self.start_date = _truncate(start_date - timedelta(days=1)) - self.end_date = _truncate(now() + timedelta(days=1)) + self.start_date = truncate_timezone_aware(start_date - timedelta(days=1)) + self.end_date = truncate_timezone_aware(now() + timedelta(days=1)) try: value = int(value) except (ValueError, TypeError): diff --git a/dojo/finding/views.py b/dojo/finding/views.py index 901ef7e8e6f..271717cb6c3 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -2651,6 +2651,7 @@ def finding_bulk_update_all(request, pid=None): find.false_p = form.cleaned_data["false_p"] find.out_of_scope = form.cleaned_data["out_of_scope"] find.is_mitigated = form.cleaned_data["is_mitigated"] + find.under_review = form.cleaned_data["under_review"] find.last_reviewed = timezone.now() find.last_reviewed_by = request.user diff --git a/dojo/forms.py b/dojo/forms.py index 17c2c18727e..a460a09f722 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -343,6 +343,10 @@ class ProductForm(forms.ModelForm): product_manager = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) technical_contact = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) team_manager = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) + tags = TagField( + required=False, + help_text="Add tags that help describe this product. Choose from the list or add new tags. Press Enter key to add.", + ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -1014,6 +1018,10 @@ class EngForm(forms.ModelForm): queryset=None, required=True, label="Testing Lead") test_strategy = forms.URLField(required=False, label="Test Strategy URL") + tags = TagField( + required=False, + help_text="Add tags that help describe this engagement. Choose from the list or add new tags. 
Press Enter key to add.", + ) def __init__(self, *args, **kwargs): cicd = False @@ -1093,10 +1101,13 @@ class TestForm(forms.ModelForm): attrs={"class": "datepicker", "autocomplete": "off"})) target_end = forms.DateTimeField(widget=forms.TextInput( attrs={"class": "datepicker", "autocomplete": "off"})) - lead = forms.ModelChoiceField( queryset=None, required=False, label="Testing Lead") + tags = TagField( + required=False, + help_text="Add tags that help describe this test. Choose from the list or add new tags. Press Enter key to add.", + ) def __init__(self, *args, **kwargs): obj = None @@ -1453,6 +1464,10 @@ class FindingForm(forms.ModelForm): choices=EFFORT_FOR_FIXING_CHOICES, error_messages={ "invalid_choice": EFFORT_FOR_FIXING_INVALID_CHOICE}) + tags = TagField( + required=False, + help_text="Add tags that help describe this finding. Choose from the list or add new tags. Press Enter key to add.", + ) # the only reliable way without hacking internal fields to get predicatble ordering is to make it explicit field_order = ("title", "group", "date", "sla_start_date", "sla_expiration_date", "cwe", "vulnerability_ids", "severity", "cvss_info", "cvssv3", @@ -1720,10 +1735,15 @@ def clean_tags(self): class Meta: model = Finding fields = ("severity", "date", "planned_remediation_date", "active", "verified", "false_p", "duplicate", "out_of_scope", - "is_mitigated") + "under_review", "is_mitigated") class EditEndpointForm(forms.ModelForm): + tags = TagField( + required=False, + help_text="Add tags that help describe this endpoint. Choose from the list or add new tags. Press Enter key to add.", + ) + class Meta: model = Endpoint exclude = ["product", "inherited_tags"] diff --git a/dojo/metrics/utils.py b/dojo/metrics/utils.py index eac3a4adba5..913d62c2361 100644 --- a/dojo/metrics/utils.py +++ b/dojo/metrics/utils.py @@ -404,6 +404,8 @@ def js_epoch( """ if isinstance(d, date): d = datetime.combine(d, datetime.min.time()) + if timezone.is_naive(d): + d = timezone.make_aware(d) return int(d.timestamp()) * 1000 diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 7b134e52ad3..1dace4689c4 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1862,6 +1862,7 @@ def saml2_attrib_map_format(din): "MGAA-": "https://advisories.mageia.org/&&.html", # e.g. https://advisories.mageia.org/MGAA-2013-0054.html "MGASA-": "https://advisories.mageia.org/&&.html", # e.g. https://advisories.mageia.org/MGASA-2025-0023.html "NCSC-": "https://advisories.ncsc.nl/advisory?id=", # e.g. https://advisories.ncsc.nl/advisory?id=NCSC-2025-0191 + "NN-": "https://cvepremium.circl.lu/vuln/", # e.g. https://cvepremium.circl.lu/vuln/NN-2021:2-01 "NTAP-": "https://security.netapp.com/advisory/", # e.g. https://security.netapp.com/advisory/ntap-20250328-0007 "OPENSUSE-SU-": "https://osv.dev/vulnerability/", # e.g. https://osv.dev/vulnerability/openSUSE-SU-2025:14898-1 "OSV-": "https://osv.dev/vulnerability/", # e.g. 
https://osv.dev/vulnerability/OSV-2024-1330 diff --git a/dojo/test/views.py b/dojo/test/views.py index 46af27b444a..06301d20813 100644 --- a/dojo/test/views.py +++ b/dojo/test/views.py @@ -405,6 +405,10 @@ def test_ics(request, tid): test = get_object_or_404(Test, id=tid) start_date = datetime.combine(test.target_start, datetime.min.time()) end_date = datetime.combine(test.target_end, datetime.max.time()) + if timezone.is_naive(start_date): + start_date = timezone.make_aware(start_date) + if timezone.is_naive(end_date): + end_date = timezone.make_aware(end_date) uid = f"dojo_test_{test.id}_{test.engagement.id}_{test.engagement.product.id}" cal = get_cal_event( start_date, diff --git a/dojo/tools/blackduck_component_risk/parser.py b/dojo/tools/blackduck_component_risk/parser.py index bf601da2fda..b8e529328c2 100644 --- a/dojo/tools/blackduck_component_risk/parser.py +++ b/dojo/tools/blackduck_component_risk/parser.py @@ -67,6 +67,7 @@ def ingest_findings(self, components, securities, sources, test): description = self.license_description(component, source) severity = "High" mitigation = self.license_mitigation(component) + fix_available = bool(mitigation) impact = "N/A" references = self.license_references(component) finding = Finding( @@ -79,6 +80,7 @@ def ingest_findings(self, components, securities, sources, test): references=references, static_finding=True, unique_id_from_tool=component_id, + fix_available=fix_available, ) license_risk.append(finding) elif "None" not in self.license_severity(component): @@ -88,6 +90,7 @@ def ingest_findings(self, components, securities, sources, test): description = self.license_description(component, source) severity = self.license_severity(component) mitigation = self.license_mitigation(component, violation=False) + fix_available = bool(mitigation) impact = "N/A" references = self.license_references(component) finding = Finding( @@ -100,6 +103,7 @@ def ingest_findings(self, components, securities, sources, test): references=references, static_finding=True, unique_id_from_tool=component_id, + fix_available=fix_available, ) license_risk.append(finding) items.extend(license_risk) @@ -111,10 +115,10 @@ def ingest_findings(self, components, securities, sources, test): description = self.security_description(vulns) severity = self.security_severity(vulns) mitigation = self.security_mitigation(vulns) + fix_available = bool(mitigation) impact = self.security_impact(vulns) references = self.security_references(vulns) file_path = self.security_filepath(vulns) - finding = Finding( title=title, test=test, @@ -126,6 +130,7 @@ def ingest_findings(self, components, securities, sources, test): static_finding=True, file_path=file_path, unique_id_from_tool=component_id, + fix_available=fix_available, ) security_risk.append(finding) items.extend(security_risk) diff --git a/dojo/tools/cyberwatch_galeax/parser.py b/dojo/tools/cyberwatch_galeax/parser.py index 7c8353917e2..600001da2a5 100644 --- a/dojo/tools/cyberwatch_galeax/parser.py +++ b/dojo/tools/cyberwatch_galeax/parser.py @@ -5,6 +5,7 @@ import cvss.parser from cvss.cvss3 import CVSS3 +from django.utils import timezone from dojo.models import Endpoint, Endpoint_Status, Finding @@ -202,7 +203,7 @@ def build_findings_for_cve(self, cve_code, c_data, test): products = c_data["products"] if not products: - mitigated_date = datetime.now() + mitigated_date = timezone.now() mitigation = f"Fixed At: {mitigated_date}" endpoints = [Endpoint(host=e) for e in c_data["no_product_endpoints"]] @@ -269,7 +270,7 @@ def 
determine_product_finding_state(self, p_data): active_status = any(am[0] for am in p_data["active_mitigated_data"]) mitigated_date = (max(am[1] for am in p_data["active_mitigated_data"] if am[1]) if [am[1] for am in p_data["active_mitigated_data"] if am[1]] and not active_status - else (datetime.now() if not active_status else None)) + else (timezone.now() if not active_status else None)) return component_version_str, active_status, mitigated_date def create_finding( @@ -466,7 +467,7 @@ def process_servers_for_security_issue(self, servers): active_status = True mitigated_date = None else: - mitigated_date = datetime.now() + mitigated_date = timezone.now() mitigated_dates.append(mitigated_date) detected_at_str = server.get("detected_at") @@ -483,7 +484,7 @@ def process_servers_for_security_issue(self, servers): ) unsaved_endpoint_status.append(endpoint_status) - mitigated_date = (max(mitigated_dates) if mitigated_dates else datetime.now()) if not active_status else None + mitigated_date = (max(mitigated_dates) if mitigated_dates else timezone.now()) if not active_status else None return unsaved_endpoints, unsaved_endpoint_status, active_status, mitigated_date def parse_detected_at(self, detected_at_str): @@ -491,7 +492,7 @@ def parse_detected_at(self, detected_at_str): try: return datetime.strptime(detected_at_str, "%Y-%m-%dT%H:%M:%S.%fZ") except (ValueError, TypeError): - return datetime.now() + return timezone.now() def parse_fixed_at(self, fixed_at_str): """Parse fixed_at datetime, defaulting to now if parsing fails.""" diff --git a/dojo/tools/wazuh/v4_8.py b/dojo/tools/wazuh/v4_8.py index 3192e38d231..636ee0210d5 100644 --- a/dojo/tools/wazuh/v4_8.py +++ b/dojo/tools/wazuh/v4_8.py @@ -1,5 +1,3 @@ -import hashlib - from dojo.models import Finding @@ -11,29 +9,26 @@ def parse_findings(self, test, data): item = item_source.get("_source") vuln = item.get("vulnerability") cve = vuln.get("id") + + # Construct a unique key for deduplication + dupe_key = f"{cve}-{item.get('agent', {}).get('id')}" + + if dupe_key in dupes: + continue # Skip if this finding has already been processed + description = vuln.get("description") description += "\nAgent id:" + item.get("agent").get("id") description += "\nAgent name:" + item.get("agent").get("name") severity = vuln.get("severity") cvssv3_score = vuln.get("score").get("base") publish_date = vuln.get("published_at").split("T")[0] - agent_id = item.get("agent").get("id") detection_time = vuln.get("detected_at").split("T")[0] - references = vuln.get("reference") title = ( cve + " affects (version: " + item.get("package").get("version") + ")" ) - dupe_key = title + agent_id + description - dupe_key = hashlib.sha256(dupe_key.encode("utf-8")).hexdigest() - - if dupe_key in dupes: - find = dupes[dupe_key] - else: - dupes[dupe_key] = True - find = Finding( title=title, test=test, @@ -48,6 +43,7 @@ def parse_findings(self, test, data): unique_id_from_tool=dupe_key, date=detection_time, ) - find.unsaved_vulnerability_ids = cve + find.unsaved_vulnerability_ids = [cve] dupes[dupe_key] = find + return list(dupes.values()) diff --git a/dojo/utils.py b/dojo/utils.py index a8b6b1aa86b..858d24c6bb6 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -1596,8 +1596,13 @@ def calculate_grade(product, *args, **kwargs): aeval = Interpreter() aeval(system_settings.product_grade) grade_product = f"grade_product({critical}, {high}, {medium}, {low})" - product.prod_numeric_grade = aeval(grade_product) - super(Product, product).save() + prod_numeric_grade = aeval(grade_product) + if 
prod_numeric_grade != product.prod_numeric_grade: + logger.debug("Updating product %s grade from %s to %s", product.id, product.prod_numeric_grade, prod_numeric_grade) + product.prod_numeric_grade = prod_numeric_grade + super(Product, product).save() + else: + logger.debug("Product %s grade %i is up to date", product.id, prod_numeric_grade) def get_celery_worker_status(): @@ -2463,7 +2468,8 @@ def get_open_findings_burndown(product): findings = Finding.objects.filter(test__engagement__product=product, duplicate=False) f_list = list(findings) - curr_date = datetime.combine(datetime.now(), datetime.min.time()) + curr_date = datetime.combine(timezone.now().date(), datetime.min.time()) + curr_date = timezone.make_aware(curr_date) start_date = curr_date - timedelta(days=90) critical_count = 0 @@ -2701,3 +2707,21 @@ def parse_cvss_data(cvss_vector_string: str) -> dict: } logger.debug("No valid CVSS3 or CVSS4 vector found in %s", cvss_vector_string) return {} + + +def truncate_timezone_aware(dt): + """ + Truncate datetime to date and make it timezone-aware. + This replaces the django_filters._truncate function which creates naive datetimes. + """ + if dt is None: + return None + + # Get the date part and create a new datetime at midnight + truncated = datetime.combine(dt.date(), datetime.min.time()) + + # Make it timezone-aware if it isn't already + if timezone.is_naive(truncated): + truncated = timezone.make_aware(truncated) + + return truncated diff --git a/tests/check_various_pages.py b/tests/check_various_pages.py index c78c7c596d0..aa1188253cc 100644 --- a/tests/check_various_pages.py +++ b/tests/check_various_pages.py @@ -32,6 +32,11 @@ def test_finding_group_open_filtered_status(self): driver = self.driver driver.get(self.base_url + "finding_group/open?name=CVE&severity=Medium&engagement=14&product=6") + def test_date_filter(self): + driver = self.driver + # can result in an error about date not having timezone information + driver.get(self.base_url + "finding/open?last_status_update=2") + def suite(): suite = unittest.TestSuite() @@ -42,6 +47,7 @@ def suite(): suite.addTest(VariousPagesTest("test_finding_group_all_status")) suite.addTest(VariousPagesTest("test_finding_group_closed_status")) suite.addTest(VariousPagesTest("test_finding_group_open_filtered_status")) + suite.addTest(VariousPagesTest("test_date_filter")) return suite diff --git a/unittests/test_importers_performance.py b/unittests/test_importers_performance.py index a629b7f97c3..5a3291eae45 100644 --- a/unittests/test_importers_performance.py +++ b/unittests/test_importers_performance.py @@ -222,10 +222,10 @@ def test_import_reimport_reimport_performance_no_async_with_product_grading(self self.system_settings(enable_product_grade=True) self.import_reimport_performance( - expected_num_queries1=702, + expected_num_queries1=687, expected_num_async_tasks1=15, - expected_num_queries2=645, + expected_num_queries2=621, expected_num_async_tasks2=28, - expected_num_queries3=322, + expected_num_queries3=302, expected_num_async_tasks3=25, ) diff --git a/unittests/tools/test_blackduck_component_risk_parser.py b/unittests/tools/test_blackduck_component_risk_parser.py index 5ae931bc1f0..773711df64e 100644 --- a/unittests/tools/test_blackduck_component_risk_parser.py +++ b/unittests/tools/test_blackduck_component_risk_parser.py @@ -10,3 +10,11 @@ def test_blackduck_enhanced_zip_upload(self): parser = BlackduckComponentRiskParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(12, len(findings)) + findings = 
list(findings)
+        self.assertEqual("License Risk: xmldom:0.1.21", findings[0].title)
+        self.assertTrue(findings[0].fix_available)
+        self.assertEqual("Package has a license that is In Violation and should not be used: xmldom:0.1.21. Please use another component with an acceptable license.", findings[0].mitigation)
+        self.assertEqual("High", findings[0].severity)
+        self.assertEqual("N/A", findings[0].impact)
+        self.assertEqual("**Project:** foo-project ID-355b2cb252662e07153802b82041e8322ccef144-1.0.0\n", findings[0].references)
+        self.assertIsNone(findings[0].file_path)
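
The `truncate_timezone_aware` helper above exists because `django_filters.filters._truncate` produces naive results, and comparing a naive value against Django's timezone-aware `now()` fails once `USE_TZ` is enabled. A minimal sketch of that failure mode and the fix (standalone illustration, assuming a configured Django settings module with `USE_TZ = True`; `_truncate_naive` is a simplified stand-in, not django-filter's exact code):

```python
# Illustrative only: shows why naive "truncated" datetimes break
# comparisons and filters against timezone-aware datetimes under USE_TZ = True.
from datetime import datetime, timedelta

from django.utils import timezone


def _truncate_naive(dt):
    # Simplified stand-in for a naive midnight-truncation.
    return datetime.combine(dt.date(), datetime.min.time())


aware_now = timezone.now()                        # timezone-aware when USE_TZ = True
naive_cutoff = _truncate_naive(aware_now - timedelta(days=7))

try:
    _ = naive_cutoff < aware_now                  # naive vs. aware comparison
except TypeError as exc:
    print(exc)                                    # can't compare offset-naive and offset-aware datetimes

aware_cutoff = timezone.make_aware(naive_cutoff)  # what truncate_timezone_aware does
assert aware_cutoff < aware_now                   # aware vs. aware compares fine
```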