From dd44020976bfa7e9ff28e0538d68ac7ab79b88c6 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 15 Dec 2025 20:53:37 +0000 Subject: [PATCH 01/22] Update versions in application files --- components/package.json | 2 +- helm/defectdojo/Chart.yaml | 8 ++++---- helm/defectdojo/README.md | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/components/package.json b/components/package.json index 77178a7ad3e..d9500b421b6 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.53.3", + "version": "2.54.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index cf9d539c013..0bbd413b257 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.53.3" +appVersion: "2.54.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.9.3 +version: 1.9.4-dev icon: https://defectdojo.com/hubfs/DefectDojo_favicon.png maintainers: - name: madchap @@ -33,5 +33,5 @@ dependencies: # - kind: security # description: Critical bug annotations: - artifacthub.io/prerelease: "false" - artifacthub.io/changes: "- kind: changed\n description: Bump DefectDojo to 2.53.3\n" + artifacthub.io/prerelease: "true" + artifacthub.io/changes: "" diff --git a/helm/defectdojo/README.md b/helm/defectdojo/README.md index f68686a0916..e54f00ce161 100644 --- a/helm/defectdojo/README.md +++ b/helm/defectdojo/README.md @@ -511,7 +511,7 @@ The HELM schema will be generated for you. 
# General information about chart values -![Version: 1.9.3](https://img.shields.io/badge/Version-1.9.3-informational?style=flat-square) ![AppVersion: 2.53.3](https://img.shields.io/badge/AppVersion-2.53.3-informational?style=flat-square) +![Version: 1.9.4-dev](https://img.shields.io/badge/Version-1.9.4--dev-informational?style=flat-square) ![AppVersion: 2.54.0-dev](https://img.shields.io/badge/AppVersion-2.54.0--dev-informational?style=flat-square) A Helm chart for Kubernetes to install DefectDojo From 79004260ab9d695a9e533916432801fc1c4e0519 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Tue, 16 Dec 2025 15:01:41 -0600 Subject: [PATCH 02/22] Refactor GitHub integration error handling (#13913) * Refactor GitHub integration error handling for improved clarity and robustness * Fix GitHub integration to correctly check for existing issues instead of project keys --- dojo/github.py | 75 +++++++++++++++++++++++++++----------------------- 1 file changed, 41 insertions(+), 34 deletions(-) diff --git a/dojo/github.py b/dojo/github.py index 1f0f33c3313..6980000fdd4 100644 --- a/dojo/github.py +++ b/dojo/github.py @@ -15,25 +15,29 @@ def reopen_external_issue_github(find, note, prod, eng): - + # Ensure the system setting for GitHub integration is enabled from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return - # Check if we have github info related to the product - if GITHUB_PKey.objects.filter(product=prod).count() == 0: + if not GITHUB_PKey.objects.filter(product=prod).exists(): return - + # Get the GitHub product configuration github_product = GITHUB_PKey.objects.get(product=prod) if github_product is None: logger.error("Unable to get project key") return - - github_conf = github_product.git_conf + # Check if we have github info related to the finding + if not GITHUB_Issue.objects.filter(finding=find).exists(): + return + # Get the GitHub issue related to 
the finding g_issue = GITHUB_Issue.objects.get(finding=find) + if not g_issue: + logger.error("Unable to get github issue") + return try: - g_ctx = Github(auth=Auth.Token(github_conf.api_key)) + g_ctx = Github(auth=Auth.Token(github_product.git_conf.api_key)) repo = g_ctx.get_repo(github_product.git_project) issue = repo.get_issue(int(g_issue.issue_id)) except: @@ -46,25 +50,29 @@ def reopen_external_issue_github(find, note, prod, eng): def close_external_issue_github(find, note, prod, eng): - + # Ensure the system setting for GitHub integration is enabled from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return - # Check if we have github info related to the product - if GITHUB_PKey.objects.filter(product=prod).count() == 0: + if not GITHUB_PKey.objects.filter(product=prod).exists(): return - + # Get the GitHub product configuration github_product = GITHUB_PKey.objects.get(product=prod) if github_product is None: logger.error("Unable to get project key") return - - github_conf = github_product.git_conf + # Check if we have github info related to the finding + if not GITHUB_Issue.objects.filter(finding=find).exists(): + return + # Get the GitHub issue related to the finding g_issue = GITHUB_Issue.objects.get(finding=find) + if not g_issue: + logger.error("Unable to get github issue") + return try: - g_ctx = Github(auth=Auth.Token(github_conf.api_key)) + g_ctx = Github(auth=Auth.Token(github_product.git_conf.api_key)) repo = g_ctx.get_repo(github_product.git_project) issue = repo.get_issue(int(g_issue.issue_id)) except: @@ -77,25 +85,29 @@ def close_external_issue_github(find, note, prod, eng): def update_external_issue_github(find, prod, eng): - + # Ensure the system setting for GitHub integration is enabled from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return - # Check if we have github info related to the product - if 
GITHUB_PKey.objects.filter(product=prod).count() == 0: + if not GITHUB_PKey.objects.filter(product=prod).exists(): return - + # Get the GitHub product configuration github_product = GITHUB_PKey.objects.get(product=prod) if github_product is None: logger.error("Unable to get project key") return - - github_conf = github_product.git_conf + # Check if we have github info related to the finding + if not GITHUB_Issue.objects.filter(finding=find).exists(): + return + # Get the GitHub issue related to the finding g_issue = GITHUB_Issue.objects.get(finding=find) + if not g_issue: + logger.error("Unable to get github issue") + return try: - g_ctx = Github(auth=Auth.Token(github_conf.api_key)) + g_ctx = Github(auth=Auth.Token(github_product.git_conf.api_key)) repo = g_ctx.get_repo(github_product.git_project) issue = repo.get_issue(int(g_issue.issue_id)) issue.edit(title=find.title, body=github_body(find), labels=["defectdojo", "security / " + find.severity]) @@ -105,32 +117,27 @@ def update_external_issue_github(find, prod, eng): def add_external_issue_github(find, prod, eng): - + # Ensure the system setting for GitHub integration is enabled from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return - # Check if we have github info related to the product - if GITHUB_PKey.objects.filter(product=prod).count() == 0: - logger.debug("cannot find github conf for this product") + if not GITHUB_PKey.objects.filter(product=prod).exists(): return - - github_pkey = GITHUB_PKey.objects.get(product=prod) - if github_pkey is None: - logger.error("Unable to get product conf") + # Get the GitHub product configuration + github_product = GITHUB_PKey.objects.get(product=prod) + if github_product is None: + logger.error("Unable to get project key") return - - github_conf = github_pkey.git_conf - # We push only active and verified issues if "Active" in find.status() and ("Verified" in find.status() and 
get_system_setting("enforce_verified_status", True)): eng = Engagement.objects.get(test=find.test) prod = Product.objects.get(engagement=eng) github_product_key = GITHUB_PKey.objects.get(product=prod) - logger.info("Create issue with github profile: " + str(github_conf) + " on product: " + str(github_product_key)) + logger.info("Create issue with github profile: " + str(github_product_key.git_conf) + " on product: " + str(github_product_key)) try: - g = Github(auth=Auth.Token(github_conf.api_key)) + g = Github(auth=Auth.Token(github_product_key.git_conf.api_key)) user = g.get_user() logger.debug("logged in with github user: " + user.login) logger.debug("Look for project: " + github_product_key.git_project) From e188bb4ee7670ebd05069ebdcfed1ad97663657a Mon Sep 17 00:00:00 2001 From: testaccount90009 <122134756+testaccount90009@users.noreply.github.com> Date: Wed, 17 Dec 2025 09:34:44 -0800 Subject: [PATCH 03/22] autoWidth false (#13884) autoWidth false on engagement/product to fix formatting --- dojo/templates/dojo/engagements_all.html | 1 + dojo/templates/dojo/product.html | 1 + 2 files changed, 2 insertions(+) diff --git a/dojo/templates/dojo/engagements_all.html b/dojo/templates/dojo/engagements_all.html index ec31cefd2cd..e5ded4053e0 100644 --- a/dojo/templates/dojo/engagements_all.html +++ b/dojo/templates/dojo/engagements_all.html @@ -241,6 +241,7 @@

{% if enable_table_filtering %} var dojoTable = $('#engagements').DataTable({ colReorder: true, + autoWidth: false, columnDefs: [ { "orderable": false, diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index 09636f6b656..2b2a3c6a8ff 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -336,6 +336,7 @@

); }, colReorder: true, + autoWidth: false, "columns": [ { "data": "action", "searchable": false }, { "data": "product" }, From b3049f91ea2b58c9c8603cbde432d2f20965cb94 Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Wed, 17 Dec 2025 21:14:17 +0100 Subject: [PATCH 04/22] push_to_jira: fix pushing to JIRA during import/reimport in asynchronous mode (#13916) * push_to_jira: add logging * push_to_jira: add logging * push to jira: fix passing of parameters in async mode * push to jira: fix passing of parameters in async mode --- dojo/api_v2/serializers.py | 2 ++ dojo/api_v2/views.py | 2 +- dojo/engagement/views.py | 24 ++++++++++++++++++++++++ dojo/finding/deduplication.py | 2 ++ dojo/finding/helper.py | 14 ++++++++++++-- dojo/importers/default_importer.py | 14 ++++++++++++-- dojo/test/views.py | 24 ++++++++++++++++++++++++ 7 files changed, 77 insertions(+), 5 deletions(-) diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 2f884b3bb4a..4cada0d1dc6 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -2281,6 +2281,7 @@ def process_scan( Raises exceptions in the event of an error """ try: + logger.debug(f"process_scan called with context: {context}") start_time = time.perf_counter() importer = self.get_importer(**context) context["test"], _, _, _, _, _, _ = importer.process_scan( @@ -2558,6 +2559,7 @@ def process_scan( """ statistics_before, statistics_delta = None, None try: + logger.debug(f"process_scan called with context: {context}") start_time = time.perf_counter() if test := context.get("test"): statistics_before = test.statistics diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index bdde57955f2..b321c35d558 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -2515,7 +2515,7 @@ def perform_create(self, serializer): jira_driver = engagement or (product or None) if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None): push_to_jira = push_to_jira or 
jira_project.push_all_issues - # logger.debug(f"push_to_jira: {push_to_jira}") + serializer.save(push_to_jira=push_to_jira) def get_queryset(self): diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index 706210b569c..a726f514421 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -936,6 +936,30 @@ def import_findings( ) -> str | None: """Attempt to import with all the supplied information""" try: + # Log only user-entered form values, excluding internal objects + user_values = { + "scan_type": context.get("scan_type"), + "scan_date": context.get("scan_date"), + "minimum_severity": context.get("minimum_severity"), + "active": context.get("active"), + "verified": context.get("verified"), + "test_title": context.get("test_title"), + "tags": context.get("tags"), + "version": context.get("version"), + "branch_tag": context.get("branch_tag"), + "build_id": context.get("build_id"), + "commit_hash": context.get("commit_hash"), + "service": context.get("service"), + "close_old_findings": context.get("close_old_findings"), + "apply_tags_to_findings": context.get("apply_tags_to_findings"), + "apply_tags_to_endpoints": context.get("apply_tags_to_endpoints"), + "close_old_findings_product_scope": context.get("close_old_findings_product_scope"), + "group_by": context.get("group_by"), + "create_finding_groups_for_all_findings": context.get("create_finding_groups_for_all_findings"), + "push_to_jira": context.get("push_to_jira"), + "push_all_jira_issues": context.get("push_all_jira_issues"), + } + logger.debug(f"import_findings called with user values: {user_values}") importer_client = self.get_importer(context) context["test"], _, finding_count, closed_finding_count, _, _, _ = importer_client.process_scan( context.pop("scan", None), diff --git a/dojo/finding/deduplication.py b/dojo/finding/deduplication.py index 7297e55fef4..d11c66b17ba 100644 --- a/dojo/finding/deduplication.py +++ b/dojo/finding/deduplication.py @@ -27,6 +27,7 @@ def 
get_finding_models_for_deduplication(finding_ids): """ if not finding_ids: + logger.debug("get_finding_models_for_deduplication called with no finding_ids") return [] return list( @@ -543,6 +544,7 @@ def dedupe_batch_of_findings(findings, *args, **kwargs): return batch_dedupe_method(findings, *args, **kwargs) if not findings: + logger.debug("dedupe_batch_of_findings called with no findings") return None enabled = System_Settings.objects.get().enable_deduplication diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index 19bf9ee6d99..a0cf29f2120 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -470,8 +470,8 @@ def post_process_finding_save_internal(finding, dedupe_option=True, rules_option @app.task def post_process_findings_batch_signature(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True, issue_updater_option=True, push_to_jira=False, user=None, **kwargs): - return post_process_findings_batch(finding_ids, dedupe_option, rules_option, product_grading_option, - issue_updater_option, push_to_jira, user, **kwargs) + return post_process_findings_batch(finding_ids, *args, dedupe_option=dedupe_option, rules_option=rules_option, product_grading_option=product_grading_option, issue_updater_option=issue_updater_option, push_to_jira=push_to_jira, user=user, **kwargs) + # Pass arguments as keyword arguments to ensure Celery properly serializes them @dojo_async_task @@ -479,13 +479,21 @@ def post_process_findings_batch_signature(finding_ids, *args, dedupe_option=True def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True, issue_updater_option=True, push_to_jira=False, user=None, **kwargs): + logger.debug( + f"post_process_findings_batch called: finding_ids_count={len(finding_ids) if finding_ids else 0}, " + f"args={args}, dedupe_option={dedupe_option}, rules_option={rules_option}, " + f"product_grading_option={product_grading_option}, 
issue_updater_option={issue_updater_option}, " + f"push_to_jira={push_to_jira}, user={user.id if user else None}, kwargs={kwargs}", + ) if not finding_ids: return system_settings = System_Settings.objects.get() # use list() to force a complete query execution and related objects to be loaded once + logger.debug(f"getting finding models for batch deduplication with: {len(finding_ids)} findings") findings = get_finding_models_for_deduplication(finding_ids) + logger.debug(f"found {len(findings)} findings for batch deduplication") if not findings: logger.debug(f"no findings found for batch deduplication with IDs: {finding_ids}") @@ -517,6 +525,8 @@ def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_op jira_helper.push_to_jira(finding) else: jira_helper.push_to_jira(finding.finding_group) + else: + logger.debug("push_to_jira is False, not ushing to JIRA") @receiver(pre_delete, sender=Finding) diff --git a/dojo/importers/default_importer.py b/dojo/importers/default_importer.py index 63f41b8f744..3030e7f4e4e 100644 --- a/dojo/importers/default_importer.py +++ b/dojo/importers/default_importer.py @@ -238,22 +238,30 @@ def process_findings( # Categorize this finding as a new one new_findings.append(finding) # all data is already saved on the finding, we only need to trigger post processing in batches + logger.debug("process_findings: self.push_to_jira=%s, self.findings_groups_enabled=%s, self.group_by=%s", + self.push_to_jira, self.findings_groups_enabled, self.group_by) push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by) + logger.debug("process_findings: computed push_to_jira=%s", push_to_jira) batch_finding_ids.append(finding.id) # If batch is full or we're at the end, dispatch one batched task if len(batch_finding_ids) >= batch_max_size or is_final_finding: finding_ids_batch = list(batch_finding_ids) batch_finding_ids.clear() + logger.debug("process_findings: dispatching batch with push_to_jira=%s 
(batch_size=%d, is_final=%s)", + push_to_jira, len(finding_ids_batch), is_final_finding) if we_want_async(async_user=self.user): - finding_helper.post_process_findings_batch_signature( + signature = finding_helper.post_process_findings_batch_signature( finding_ids_batch, dedupe_option=True, rules_option=True, product_grading_option=True, issue_updater_option=True, push_to_jira=push_to_jira, - )() + ) + logger.debug("process_findings: signature created with push_to_jira=%s, signature.kwargs=%s", + push_to_jira, signature.kwargs) + signature() else: finding_helper.post_process_findings_batch( finding_ids_batch, @@ -279,6 +287,8 @@ def process_findings( jira_helper.push_to_jira(findings[0].finding_group) else: jira_helper.push_to_jira(findings[0]) + else: + logger.debug("push_to_jira is False, not pushing to JIRA") # Note: All chord batching is now handled within the loop above diff --git a/dojo/test/views.py b/dojo/test/views.py index 5db820d6c3a..d2bf11092e7 100644 --- a/dojo/test/views.py +++ b/dojo/test/views.py @@ -964,6 +964,30 @@ def reimport_findings( ) -> str | None: """Attempt to import with all the supplied information""" try: + # Log only user-entered form values, excluding internal objects + user_values = { + "scan_type": context.get("scan_type"), + "scan_date": context.get("scan_date"), + "minimum_severity": context.get("minimum_severity"), + "active": context.get("active"), + "verified": context.get("verified"), + "tags": context.get("tags"), + "version": context.get("version"), + "branch_tag": context.get("branch_tag"), + "build_id": context.get("build_id"), + "commit_hash": context.get("commit_hash"), + "service": context.get("service"), + "close_old_findings": context.get("close_old_findings"), + "apply_tags_to_findings": context.get("apply_tags_to_findings"), + "apply_tags_to_endpoints": context.get("apply_tags_to_endpoints"), + "close_old_findings_product_scope": context.get("close_old_findings_product_scope"), + "group_by": 
context.get("group_by"), + "create_finding_groups_for_all_findings": context.get("create_finding_groups_for_all_findings"), + "push_to_jira": context.get("push_to_jira"), + "push_all_jira_issues": context.get("push_all_jira_issues"), + "do_not_reactivate": context.get("do_not_reactivate"), + } + logger.debug(f"reimport_findings called with user values: {user_values}") importer_client = self.get_reimporter(context) ( context["test"], From 26fba7ff2c137c176abbddc4c057748921577390 Mon Sep 17 00:00:00 2001 From: manuelsommer <47991713+manuel-sommer@users.noreply.github.com> Date: Fri, 19 Dec 2025 18:00:16 +0100 Subject: [PATCH 05/22] Add DD_SOCIAL_AUTH_CREATE_USER_MAPPING to docs (#13929) --- docs/content/en/customize_dojo/user_management/configure_sso.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/content/en/customize_dojo/user_management/configure_sso.md b/docs/content/en/customize_dojo/user_management/configure_sso.md index 1aaed3b95b3..e86eb3fb1f5 100644 --- a/docs/content/en/customize_dojo/user_management/configure_sso.md +++ b/docs/content/en/customize_dojo/user_management/configure_sso.md @@ -557,6 +557,7 @@ You can also optionally set the following variables: {{< highlight python >}} DD_SOCIAL_AUTH_OIDC_ID_KEY=(str, ''), #the key associated with the OIDC user IDs DD_SOCIAL_AUTH_OIDC_USERNAME_KEY=(str, ''), #the key associated with the OIDC usernames + DD_SOCIAL_AUTH_CREATE_USER_MAPPING=(str, "username"), #could also be email or fullname DD_SOCIAL_AUTH_OIDC_WHITELISTED_DOMAINS=(list, ['']), #list of domains allowed for login DD_SOCIAL_AUTH_OIDC_JWT_ALGORITHMS=(list, ["RS256","HS256"]), DD_SOCIAL_AUTH_OIDC_ID_TOKEN_ISSUER=(str, ''), From 466c28f44ad45892d6c760dbeda7e7f68944a0ba Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Sat, 20 Dec 2025 17:54:03 +0100 Subject: [PATCH 06/22] tags: allow setting tag truncate length --- dojo/settings/settings.dist.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/dojo/settings/settings.dist.py 
b/dojo/settings/settings.dist.py index 003dcb0b3ed..6798d66cfa1 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -92,6 +92,8 @@ DD_CELERY_PASS_MODEL_BY_ID=(str, True), DD_CELERY_LOG_LEVEL=(str, "INFO"), DD_TAG_BULK_ADD_BATCH_SIZE=(int, 1000), + # Tagulous slug truncate unique setting. Set to -1 to use tagulous internal default (5) + DD_TAGULOUS_SLUG_TRUNCATE_UNIQUE=(int, -1), # Minimum number of model updated instances before search index updates as performaed asynchronously. Set to -1 to disable async updates. DD_WATSON_ASYNC_INDEX_UPDATE_THRESHOLD=(int, 10), DD_WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE=(int, 1000), @@ -1875,6 +1877,11 @@ def saml2_attrib_map_format(din): # using 'element' for width should take width from css defined in template, but it doesn't. So set to 70% here. TAGULOUS_AUTOCOMPLETE_SETTINGS = {"placeholder": "Enter some tags (comma separated, use enter to select / create a new tag)", "width": "70%"} +# Configure tagulous slug truncate unique setting if provided +# If not set (value is -1), tagulous will use its internal default value of 5 +if (truncate_unique := env("DD_TAGULOUS_SLUG_TRUNCATE_UNIQUE")) != -1: + TAGULOUS_SLUG_TRUNCATE_UNIQUE = truncate_unique + EDITABLE_MITIGATED_DATA = env("DD_EDITABLE_MITIGATED_DATA") # FEATURE_FINDING_GROUPS feature is moved to system_settings, will be removed from settings file From 02a0b2e7c01086b9e02fdeb871217cb69597d0c7 Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Sat, 20 Dec 2025 18:18:00 +0100 Subject: [PATCH 07/22] closed finding metrics: use mitigated_date instead of created_date --- .../unit_metrics_additional_data.json | 4 +- dojo/metrics/utils.py | 9 +- dojo/product/views.py | 28 +++++- unittests/test_metrics_queries.py | 96 ++++++++++++++++++- 4 files changed, 126 insertions(+), 11 deletions(-) diff --git a/dojo/fixtures/unit_metrics_additional_data.json b/dojo/fixtures/unit_metrics_additional_data.json index 721e47eaac6..246a0534133 100644 --- 
a/dojo/fixtures/unit_metrics_additional_data.json +++ b/dojo/fixtures/unit_metrics_additional_data.json @@ -175,7 +175,7 @@ "description": "TEST finding", "mitigated_by": null, "reporter": 2, - "mitigated": null, + "mitigated": "2018-01-02T00:00:00Z", "active": false, "line": 100, "under_review": false, @@ -416,7 +416,7 @@ "description": "test finding", "mitigated_by": null, "reporter": 1, - "mitigated": null, + "mitigated": "2017-12-28T00:00:00Z", "active": true, "line": 123, "under_review": false, diff --git a/dojo/metrics/utils.py b/dojo/metrics/utils.py index c3fa891aeb9..3c7750e5130 100644 --- a/dojo/metrics/utils.py +++ b/dojo/metrics/utils.py @@ -76,8 +76,13 @@ def finding_queries( # Filter by the date ranges supplied all_findings_within_date_range = all_authorized_findings.filter(date__range=[start_date, end_date]) - # Get the list of closed and risk accepted findings - closed_filtered_findings = all_findings_within_date_range.filter(CLOSED_FINDINGS_QUERY) + # Get the list of closed findings filtered by mitigated date (not discovery date) + # This ensures findings closed within the date range are included even if discovered outside it + closed_filtered_findings = all_authorized_findings.filter( + CLOSED_FINDINGS_QUERY, + mitigated__range=[start_date, end_date], + mitigated__isnull=False, + ) accepted_filtered_findings = all_findings_within_date_range.filter(ACCEPTED_FINDINGS_QUERY) active_filtered_findings = all_findings_within_date_range.filter(OPEN_FINDINGS_QUERY) diff --git a/dojo/product/views.py b/dojo/product/views.py index 6884877398a..837e0bdfefc 100644 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -438,7 +438,8 @@ def finding_queries(request, prod): filters["new_verified"] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters["open"] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") 
filters["inactive"] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters["closed"] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + # Filter closed findings by mitigated date (not discovery date) to show findings closed within the date range + filters["closed"] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(mitigated__range=[start_date, end_date], mitigated__isnull=False).order_by("mitigated") filters["false_positive"] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters["out_of_scope"] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters["all"] = findings_qs.order_by("date") @@ -610,7 +611,8 @@ def view_product_metrics(request, pid): all_findings = list(filters.get("all", []).values("id", "date", "severity")) open_findings = list(filters.get("open", []).values("id", "date", "mitigated", "severity")) - closed_findings = list(filters.get("closed", []).values("id", "date", "severity")) + # Include mitigated date for closed findings to group by when they were closed, not discovered + closed_findings = list(filters.get("closed", []).values("id", "date", "mitigated", "severity")) accepted_findings = list(filters.get("accepted", []).values("id", "date", "severity")) """ @@ -681,11 +683,29 @@ def view_product_metrics(request, pid): if open_objs_by_severity.get(finding.get("severity")) is not None: open_objs_by_severity[finding.get("severity")] += 1 - # Close findings + # Close findings - group by mitigated date, not discovery date elif closed_findings_dict.get(finding.get("id", None)): - if unix_timestamp in open_close_weekly: + # Find the closed finding to get its mitigated date + closed_finding = next((f for f in closed_findings if f.get("id") 
== finding.get("id")), None) + if closed_finding and closed_finding.get("mitigated"): + # Use mitigated date for grouping closed findings + mitigated_date = closed_finding.get("mitigated") + mitigated_date_only = mitigated_date.date() if isinstance(mitigated_date, datetime) else mitigated_date + iso_cal = mitigated_date_only.isocalendar() + mitigated_week_start = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) + mitigated_html_date = mitigated_week_start.strftime("%m/%d
%Y
") + mitigated_unix_timestamp = (tcalendar.timegm(mitigated_week_start.timetuple()) * 1000) + + if mitigated_unix_timestamp in open_close_weekly: + open_close_weekly[mitigated_unix_timestamp]["closed"] += 1 + else: + open_close_weekly[mitigated_unix_timestamp] = {"closed": 1, "open": 0, "accepted": 0} + open_close_weekly[mitigated_unix_timestamp]["week"] = mitigated_html_date + elif unix_timestamp in open_close_weekly: + # Fallback to discovery date if mitigated date is not available open_close_weekly[unix_timestamp]["closed"] += 1 else: + # Fallback to discovery date if mitigated date is not available open_close_weekly[unix_timestamp] = {"closed": 1, "open": 0, "accepted": 0} open_close_weekly[unix_timestamp]["week"] = html_date # Optimization: count severity level on server side diff --git a/unittests/test_metrics_queries.py b/unittests/test_metrics_queries.py index 4d18ebc1ef6..751122c54ac 100644 --- a/unittests/test_metrics_queries.py +++ b/unittests/test_metrics_queries.py @@ -8,7 +8,7 @@ from django.urls import reverse from dojo.metrics import utils -from dojo.models import Product_Type, User +from dojo.models import Engagement, Finding, Product, Product_Type, Test, User from .dojo_test_case import DojoTestCase @@ -31,12 +31,12 @@ def add(*args, **kwargs): FINDING_8 = {"id": 240, "date": date(2018, 1, 1), "severity": "High", "active": True, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": 2, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} FINDING_9 = {"id": 241, "date": date(2018, 1, 1), "severity": "High", "active": False, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": 2, "out_of_scope": False, "risk_accepted": True, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} FINDING_10 = 
{"id": 242, "date": date(2018, 1, 1), "severity": "High", "active": False, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": 2, "out_of_scope": False, "risk_accepted": True, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} -FINDING_11 = {"id": 243, "date": date(2017, 12, 31), "severity": "High", "active": False, "verified": False, "false_p": False, "duplicate": False, "duplicate_finding_id": None, "out_of_scope": False, "risk_accepted": True, "under_review": False, "is_mitigated": True, "mitigated": None, "mitigated_by_id": None, "reporter_id": 2, "numerical_severity": "S0"} +FINDING_11 = {"id": 243, "date": date(2017, 12, 31), "severity": "High", "active": False, "verified": False, "false_p": False, "duplicate": False, "duplicate_finding_id": None, "out_of_scope": False, "risk_accepted": True, "under_review": False, "is_mitigated": True, "mitigated": datetime(2018, 1, 2, tzinfo=zoneinfo.ZoneInfo("UTC")), "mitigated_by_id": None, "reporter_id": 2, "numerical_severity": "S0"} FINDING_12 = {"id": 244, "date": date(2017, 12, 29), "severity": "Low", "active": True, "verified": True, "false_p": False, "duplicate": False, "duplicate_finding_id": None, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} FINDING_13 = {"id": 245, "date": date(2017, 12, 27), "severity": "Low", "active": False, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": 22, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} FINDING_14 = {"id": 246, "date": date(2018, 1, 2), "severity": "Low", "active": False, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": 22, 
"out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} FINDING_15 = {"id": 247, "date": date(2018, 1, 3), "severity": "Low", "active": False, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": None, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} -FINDING_16 = {"id": 248, "date": date(2017, 12, 27), "severity": "Low", "active": True, "verified": True, "false_p": False, "duplicate": False, "duplicate_finding_id": None, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": True, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} +FINDING_16 = {"id": 248, "date": date(2017, 12, 27), "severity": "Low", "active": True, "verified": True, "false_p": False, "duplicate": False, "duplicate_finding_id": None, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": True, "mitigated": datetime(2017, 12, 28, tzinfo=zoneinfo.ZoneInfo("UTC")), "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} FINDING_17 = {"id": 249, "date": date(2018, 1, 4), "severity": "Low", "active": False, "verified": False, "false_p": False, "duplicate": True, "duplicate_finding_id": 224, "out_of_scope": False, "risk_accepted": False, "under_review": False, "is_mitigated": False, "mitigated": None, "mitigated_by_id": None, "reporter_id": 1, "numerical_severity": "S0"} @@ -163,6 +163,96 @@ def test_finding_queries(self, mock_timezone): self.assertIsInstance(finding_queries["start_date"], datetime) self.assertIsInstance(finding_queries["end_date"], datetime) + @patch("django.utils.timezone.now") + def test_closed_findings_filtered_by_mitigated_date(self, mock_timezone): + """ + Test that closed findings 
are filtered by mitigated date, not discovery date. + + This test verifies the fix for issue #9735: findings discovered outside the date + range but closed within it should appear in closed metrics. + """ + mock_datetime = datetime(2020, 12, 9, tzinfo=zoneinfo.ZoneInfo("UTC")) + mock_timezone.return_value = mock_datetime + + # Get a test product and engagement + product = Product.objects.first() + if not product: + self.skipTest("No product available in test data") + engagement = Engagement.objects.filter(product=product).first() + if not engagement: + self.skipTest("No engagement available in test data") + test = Test.objects.filter(engagement=engagement).first() + if not test: + self.skipTest("No test available in test data") + + # Create a finding discovered BEFORE the date range but closed WITHIN it + # Date range: 2017-12-26 to 2018-01-05 + finding_discovered_before = Finding.objects.create( + title="Finding discovered before range, closed within range", + description="Test finding", + severity="High", + date=date(2017, 10, 1), # Discovered before range + test=test, + reporter=self.request.user, + active=False, + is_mitigated=True, + mitigated=datetime(2018, 1, 2, tzinfo=zoneinfo.ZoneInfo("UTC")), # Closed within range + ) + + # Create a finding discovered WITHIN the date range but closed AFTER it + finding_closed_after = Finding.objects.create( + title="Finding discovered within range, closed after range", + description="Test finding", + severity="Medium", + date=date(2017, 12, 30), # Discovered within range + test=test, + reporter=self.request.user, + active=False, + is_mitigated=True, + mitigated=datetime(2018, 2, 1, tzinfo=zoneinfo.ZoneInfo("UTC")), # Closed after range + ) + + # Create a finding discovered and closed WITHIN the date range + finding_both_within = Finding.objects.create( + title="Finding discovered and closed within range", + description="Test finding", + severity="Low", + date=date(2017, 12, 30), # Discovered within range + test=test, + 
reporter=self.request.user, + active=False, + is_mitigated=True, + mitigated=datetime(2018, 1, 3, tzinfo=zoneinfo.ZoneInfo("UTC")), # Closed within range + ) + + try: + product_types = [] + finding_queries = utils.finding_queries( + product_types, + self.request, + ) + + closed_findings = finding_queries["closed"] + closed_ids = list(closed_findings.values_list("id", flat=True)) + + # The finding discovered before but closed within should appear + self.assertIn(finding_discovered_before.id, closed_ids, + "Finding discovered before range but closed within range should appear in closed metrics") + + # The finding discovered within but closed after should NOT appear + self.assertNotIn(finding_closed_after.id, closed_ids, + "Finding discovered within range but closed after range should NOT appear in closed metrics") + + # The finding discovered and closed within should appear + self.assertIn(finding_both_within.id, closed_ids, + "Finding discovered and closed within range should appear in closed metrics") + + finally: + # Clean up test findings + finding_discovered_before.delete() + finding_closed_after.delete() + finding_both_within.delete() + class EndpointQueriesTest(DojoTestCase): fixtures = ["dojo_testdata.json"] From f87ffbdbaf2ca98e05821e3d97966a2df6877982 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Mon, 22 Dec 2025 13:23:12 +0100 Subject: [PATCH 08/22] feat(HELM): Make HPA more Argo-friendly (#13882) Signed-off-by: kiblik <5609770+kiblik@users.noreply.github.com> --- helm/defectdojo/Chart.yaml | 4 +++- helm/defectdojo/templates/celery-worker-deployment.yaml | 2 ++ helm/defectdojo/templates/django-deployment.yaml | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 0bbd413b257..0745888f481 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -34,4 +34,6 @@ dependencies: # description: Critical bug annotations: 
artifacthub.io/prerelease: "true" - artifacthub.io/changes: "" + artifacthub.io/changes: | + - kind: fixed + description: Drop 'replicas' when HPA is in place diff --git a/helm/defectdojo/templates/celery-worker-deployment.yaml b/helm/defectdojo/templates/celery-worker-deployment.yaml index 30620c91155..02902d876e0 100644 --- a/helm/defectdojo/templates/celery-worker-deployment.yaml +++ b/helm/defectdojo/templates/celery-worker-deployment.yaml @@ -21,7 +21,9 @@ metadata: name: {{ $fullName }}-celery-worker namespace: {{ .Release.Namespace }} spec: + {{ if (not .Values.celery.worker.autoscaling.enabled) -}} replicas: {{ .Values.celery.worker.replicas }} + {{- end }} {{- with .Values.revisionHistoryLimit }} revisionHistoryLimit: {{ . }} {{- end }} diff --git a/helm/defectdojo/templates/django-deployment.yaml b/helm/defectdojo/templates/django-deployment.yaml index 0a5e86ff420..76a5cd69445 100644 --- a/helm/defectdojo/templates/django-deployment.yaml +++ b/helm/defectdojo/templates/django-deployment.yaml @@ -20,7 +20,9 @@ metadata: name: {{ $fullName }}-django namespace: {{ .Release.Namespace }} spec: + {{ if (not .Values.django.autoscaling.enabled) -}} replicas: {{ .Values.django.replicas }} + {{- end }} {{- with .Values.django.strategy }} strategy: {{- toYaml . 
| nindent 4 }} From 54bf9956d9e754da187648c4620f675b6e9432bf Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Mon, 22 Dec 2025 13:23:19 +0100 Subject: [PATCH 09/22] docs: add dedupe batching note to 2.53 upgrade notes (#13914) --- docs/content/en/open_source/upgrading/2.53.md | 30 ++++++++++++++++--- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/docs/content/en/open_source/upgrading/2.53.md b/docs/content/en/open_source/upgrading/2.53.md index b6970b87fc9..191a9f83025 100644 --- a/docs/content/en/open_source/upgrading/2.53.md +++ b/docs/content/en/open_source/upgrading/2.53.md @@ -2,7 +2,7 @@ title: 'Upgrading to DefectDojo Version 2.53.x' toc_hide: true weight: -20251103 -description: "Helm chart: changes for initializer annotations + Replaced Redis with Valkey + HPA & PDB support" +description: "Helm chart: changes for initializer annotations + Replaced Redis with Valkey + HPA & PDB support + Batch Deduplication" --- ## Helm Chart Changes @@ -17,9 +17,9 @@ Added Helm chart support for Celery and Django deployments for Horizontal Pod Au ### Breaking changes -#### Valkey +#### Valkey -##### Renamed values +##### Renamed values HELM values had been changed to the following: - `createRedisSecret` → `createValkeySecret` @@ -40,7 +40,7 @@ If an external Redis instance is being used, set the parameter `valkey.enabled` 0. As always, perform a backup of your instance 1. If you would like to be 100% sure that you do not miss any async event (triggered deduplication, email notification, ...) it is recommended to perform the following substeps (if your system is not in production and/or you are willing to miss some notifications or postpone deduplication to a later time, feel free to skip these substeps) 0. Perform the following steps with your previous version of HELM chart (not with the upgraded one - you might lose your data) - 1. Downscale all producers of async tasks: + 1. 
Downscale all producers of async tasks:
 - Set `django.replicas` to 0 (if you used HPA, adjust it based on your needs)
 - Set `celery.beat.replicas` to 0 (if you used HPA, adjust it based on your needs)
 - Do not change `celery.worker.replicas` (they are responsible for processing your async tasks)
@@ -89,4 +89,26 @@ Both `extraAnnotations` and `initializer.podAnnotations` will now be properly ap
 
 Reimport will update existing findings `fix_available` and `fix_version` fields based on the incoming scan report.
 
+## Batch Deduplication
+
+Before 2.53.0 Defect Dojo has been deduplicating new or updated findings one-by-one. This works well for small imports and has the benefit of an easy to understand codebase and test suite. For larger imports however the performance is bad and resource usage is (very) high. A 1000+ finding import can cause a celery worker to spend minutes on deduplication.
+
+PR [13491](https://github.com/DefectDojo/django-DefectDojo/pull/13491) changes the deduplication process for import and reimport to be done in batches. The biggest benefit is that there now will be 1 database query per batch (1000 findings), instead of 1 query per finding (1000 queries).
+
+A quick test with the `jfrog_xray_unified/very_many_vulns.json` samples scan (10k findings) shows the obvious huge improvement in deduplication time. Please note that we're not only doing this for performance, but also to reduce the resources (cloud cost) needed to run Defect Dojo.
+
+initial import (no duplicates):
+| branch | import time | dedupe time | total time |
+|--------|:-----------:|:-----------:|:-----------:|
+| dev | ~200s | ~400s | ~600s |
+| dedupe-batching | ~190s | _~12s_ | ~200s |
+
+second import into the same product (all duplicates):
+
+| branch | import time | dedupe time | total time |
+|--------|:-----------:|:-----------:|:-----------:|
+| dev | ~200s | ~400s | ~600s |
+| dedupe-batching | ~190s | _~180s_ | ~370s |
+
+
 There are no other special instructions for upgrading to 2.53.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.53.0) for the contents of the release.

From 659531e0ea522ea3dccbf3f5a10ef226e19c5eb9 Mon Sep 17 00:00:00 2001
From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com>
Date: Mon, 22 Dec 2025 05:23:27 -0700
Subject: [PATCH 10/22] Change log level from warning to debug for cwe check
 (#13909)

[sc-12245]
---
 dojo/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dojo/models.py b/dojo/models.py
index 48c274ae006..d0572d370b7 100644
--- a/dojo/models.py
+++ b/dojo/models.py
@@ -2946,7 +2946,7 @@ def compute_hash_code(self):
 
         # Make sure that we have a cwe if we need one
         if self.cwe == 0 and not self.test.hash_code_allows_null_cwe:
-            deduplicationLogger.warning(
+            deduplicationLogger.debug(
                 "Cannot compute hash_code based on configured fields because cwe is 0 for finding of title '"
                 + self.title + "' found in file '" + str(self.file_path) + "'. 
Fallback to legacy mode for this finding.") return self.compute_hash_code_legacy() From 0d416a89f213cd268fc850a421078a463d220dde Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Mon, 22 Dec 2025 13:23:34 +0100 Subject: [PATCH 11/22] make ordering by sla_age more reliable (#13918) * make ordering by sla_age safer * make ordering by sla_age safer --- dojo/finding/views.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/dojo/finding/views.py b/dojo/finding/views.py index f49eab8fb5d..76aec7ed405 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -14,7 +14,7 @@ from django.core import serializers from django.core.exceptions import PermissionDenied, ValidationError from django.db import models -from django.db.models import F, QuerySet +from django.db.models import F, QuerySet, Value from django.db.models.functions import Coalesce, ExtractDay, Length, TruncDate from django.db.models.query import Prefetch from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect, JsonResponse, StreamingHttpResponse @@ -256,6 +256,11 @@ def filter_findings_by_filter_name(self, findings: QuerySet[Finding]): return findings def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Finding]): + # Apply default ordering if no ordering parameter is provided + # This maintains backward compatibility with the previous behavior + if not request.GET.get("o"): + findings = findings.order_by(self.get_order_by()) + # Set up the args for the form args = [request.GET, findings] # Set the initial form args @@ -276,11 +281,19 @@ def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Findi def get_filtered_findings(self): findings = get_authorized_findings(Permissions.Finding_View) # Annotate computed SLA age in days: sla_expiration_date - (sla_start_date or date) + # Handle NULL sla_expiration_date by using Coalesce to provide a large default value + # so NULLs sort last when sorting 
ascending (most urgent first) findings = findings.annotate( - sla_age_days=ExtractDay( - F("sla_expiration_date") - Coalesce(F("sla_start_date"), TruncDate("created")), + sla_age_days=Coalesce( + ExtractDay( + F("sla_expiration_date") - Coalesce(F("sla_start_date"), TruncDate("created")), + ), + Value(999999), # Large value to push NULLs to the end when sorting ascending + output_field=models.IntegerField(), ), - ).order_by(self.get_order_by()) + ) + # Don't apply initial order_by here - let OrderingFilter handle it via request.GET['o'] + # This prevents conflicts between initial ordering and user-requested sorting findings = self.filter_findings_by_object(findings) return self.filter_findings_by_filter_name(findings) From 75a6b44daf88b88c94fcad42a760e3030ac127d6 Mon Sep 17 00:00:00 2001 From: Jino Tesauro <53376807+Jino-T@users.noreply.github.com> Date: Mon, 22 Dec 2025 06:23:46 -0600 Subject: [PATCH 12/22] Make SonarQube Parser use creationDate for Date (#13919) * Sonarqube parse creationDate * added better handling of date conversions * Apply suggestions from code review * Update dojo/tools/sonarqube/sonarqube_restapi_json.py --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- .../tools/sonarqube/sonarqube_restapi_json.py | 23 +++++++++++++++++++ unittests/tools/test_sonarqube_parser.py | 8 +++++++ 2 files changed, 31 insertions(+) diff --git a/dojo/tools/sonarqube/sonarqube_restapi_json.py b/dojo/tools/sonarqube/sonarqube_restapi_json.py index f56f362209c..9feb7a14397 100644 --- a/dojo/tools/sonarqube/sonarqube_restapi_json.py +++ b/dojo/tools/sonarqube/sonarqube_restapi_json.py @@ -1,5 +1,8 @@ import re +import dateutil.parser +from django.utils import timezone + from dojo.models import Finding @@ -23,6 +26,10 @@ def get_json_items(self, json_content, test, mode): scope = issue.get("scope") quickFixAvailable = str(issue.get("quickFixAvailable")) codeVariants = str(issue.get("codeVariants")) + try: + date = 
str(dateutil.parser.parse(issue.get("creationDate")).date()) + except (ValueError, TypeError, dateutil.parser.ParserError): + date = timezone.now() description = "" description += "**key:** " + key + "\n" description += "**rule:** " + rule + "\n" @@ -50,6 +57,7 @@ def get_json_items(self, json_content, test, mode): dynamic_finding=False, tags=["bug"], line=line, + date=date, ) elif issue.get("type") == "VULNERABILITY": key = issue.get("key") @@ -61,6 +69,10 @@ def get_json_items(self, json_content, test, mode): message = issue.get("message") line = issue.get("line") cwe = None + try: + date = str(dateutil.parser.parse(issue.get("creationDate")).date()) + except (ValueError, TypeError, dateutil.parser.ParserError): + date = timezone.now() if "Category: CWE-" in message: cwe_pattern = r"Category: CWE-\d{1,5}" cwes = re.findall(cwe_pattern, message) @@ -119,6 +131,7 @@ def get_json_items(self, json_content, test, mode): file_path=component, tags=["vulnerability"], line=line, + date=date, ) vulnids = [] if "Reference: CVE" in message: @@ -154,6 +167,10 @@ def get_json_items(self, json_content, test, mode): scope = issue.get("scope") quickFixAvailable = str(issue.get("quickFixAvailable")) codeVariants = issue.get("codeVariants", []) + try: + date = str(dateutil.parser.parse(issue.get("creationDate")).date()) + except (ValueError, TypeError, dateutil.parser.ParserError): + date = timezone.now() description = "" description += "**rule:** " + rule + "\n" description += "**component:** " + component + "\n" @@ -185,6 +202,7 @@ def get_json_items(self, json_content, test, mode): file_path=component, tags=["code_smell"], line=line, + date=date, ) items.append(item) if json_content.get("hotspots"): @@ -200,6 +218,10 @@ def get_json_items(self, json_content, test, mode): flows = hotspot.get("flows", []) ruleKey = hotspot.get("ruleKey") messageFormattings = hotspot.get("messageFormattings", []) + try: + date = str(dateutil.parser.parse(hotspot.get("creationDate")).date()) + 
except (ValueError, TypeError, dateutil.parser.ParserError): + date = timezone.now() description = "" description += "**key:** " + key + "\n" description += "**component:** " + component + "\n" @@ -229,6 +251,7 @@ def get_json_items(self, json_content, test, mode): file_path=component, tags=["hotspot"], line=line, + date=date, ) items.append(item) return items diff --git a/unittests/tools/test_sonarqube_parser.py b/unittests/tools/test_sonarqube_parser.py index c03a6348cab..abe7fda0120 100644 --- a/unittests/tools/test_sonarqube_parser.py +++ b/unittests/tools/test_sonarqube_parser.py @@ -579,20 +579,25 @@ def test_parse_json_file_from_api_with_multiple_findings_json(self): self.assertEqual("6.4", item.cvssv3_score) self.assertEqual("package", item.component_name) self.assertEqual("1.1.2", item.component_version) + self.assertEqual("2023-10-16", item.date) item = findings[1] self.assertEqual("Web:TableWithoutCaptionCheck_asdfwfewfwefewf", item.title) self.assertEqual("Low", item.severity) self.assertEqual(0, item.cwe) self.assertIsNone(item.cvssv3_score) + self.assertEqual("2023-07-25", item.date) item = findings[2] self.assertEqual("typescript:S1533_fjoiewfjoweifjoihugu-", item.title) self.assertEqual("Low", item.severity) + self.assertEqual("2024-01-29", item.date) item = findings[3] self.assertEqual("GHSA-frr2-c345-p7c2", item.unsaved_vulnerability_ids[0]) + self.assertEqual("2023-10-16", item.date) item = findings[4] self.assertEqual("CVE-2023-52428", item.unsaved_vulnerability_ids[0]) self.assertEqual("nimbus-jose-jwt-9.24.4.jar", item.component_name) self.assertIsNone(item.component_version) + self.assertEqual("2023-10-16", item.date) my_file_handle.close() def test_parse_json_file_from_api_with_multiple_findings_hotspots_json(self): @@ -606,12 +611,15 @@ def test_parse_json_file_from_api_with_multiple_findings_hotspots_json(self): self.assertEqual(str, type(item.description)) self.assertEqual("typescript:7777_fwafewef", item.title) self.assertEqual("High", 
item.severity) + self.assertEqual("2024-02-13", item.date) item = findings[1] self.assertEqual("Web:1222_cyxcvyxcvyxv", item.title) self.assertEqual("Low", item.severity) + self.assertEqual("2023-07-27", item.date) item = findings[2] self.assertEqual("Web:9876_werrwerwerwer", item.title) self.assertEqual("Low", item.severity) + self.assertEqual("2023-07-27", item.date) my_file_handle.close() def test_parse_json_file_from_api_with_empty_json(self): From 954776e2cb35bc58cba3a0ca673d71516984327b Mon Sep 17 00:00:00 2001 From: Jino Tesauro <53376807+Jino-T@users.noreply.github.com> Date: Mon, 22 Dec 2025 06:23:55 -0600 Subject: [PATCH 13/22] Make Twistlock Parser use discoveredDate for Date (#13922) * added date support using discoveredDate * Apply suggestions from code review --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- dojo/tools/twistlock/parser.py | 8 ++++++++ unittests/tools/test_twistlock_parser.py | 2 ++ 2 files changed, 10 insertions(+) diff --git a/dojo/tools/twistlock/parser.py b/dojo/tools/twistlock/parser.py index 8e765d59b55..78c9a25755d 100644 --- a/dojo/tools/twistlock/parser.py +++ b/dojo/tools/twistlock/parser.py @@ -6,6 +6,9 @@ import textwrap from datetime import datetime +import dateutil.parser +from django.utils import timezone + from dojo.models import Finding logger = logging.getLogger(__name__) @@ -235,6 +238,10 @@ def get_item(vulnerability, test, image_metadata=""): status = vulnerability.get("status", "There seems to be no fix yet. 
Please check description field.") cvssv3_score = vulnerability.get("cvss") riskFactors = vulnerability.get("riskFactors", "No risk factors.") + try: + date = str(dateutil.parser.parse(vulnerability.get("discoveredDate")).date()) + except (ValueError, TypeError, dateutil.parser.ParserError): + date = timezone.now() # Build impact field combining severity and image metadata which can change between scans, so we add it to the impact field as the description field is sometimes used for hash code calculation impact_parts = [severity] @@ -264,6 +271,7 @@ def get_item(vulnerability, test, image_metadata=""): cvssv3=cvssv3, cvssv3_score=cvssv3_score, impact=impact_text, + date=date, ) finding.unsaved_vulnerability_ids = [vulnerability["id"]] if "id" in vulnerability else None finding.description = finding.description.strip() diff --git a/unittests/tools/test_twistlock_parser.py b/unittests/tools/test_twistlock_parser.py index 255104e8fab..c3ba514837b 100644 --- a/unittests/tools/test_twistlock_parser.py +++ b/unittests/tools/test_twistlock_parser.py @@ -113,6 +113,7 @@ def test_parse_file_with_no_link_no_description(self): self.assertIsNotNone(finding) self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) self.assertEqual("PRISMA-2021-0013", finding.unsaved_vulnerability_ids[0]) + self.assertEqual("2022-11-16", finding.date) break def test_parse_file_with_no_cvss(self): @@ -141,6 +142,7 @@ def test_parse_file_with_no_cvss(self): self.assertIn("Image ID:", finding.impact) self.assertIn("Distribution:", finding.impact) self.assertIn("Debian GNU/Linux 12", finding.impact) + self.assertEqual("2025-07-08", finding.date) def test_parse_file_with_many_vulns(self): testfile = (get_unit_tests_scans_path("twistlock") / "many_vulns.json").open(encoding="utf-8") From 18f94d0d62e7c69361bd6e2b973873dad4abe0ea Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Mon, 22 Dec 2025 13:24:03 +0100 Subject: [PATCH 14/22] fix(GHA): Fix annotation for 
renovate and dependabot (#13941) --- .github/workflows/test-helm-chart.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-helm-chart.yml b/.github/workflows/test-helm-chart.yml index a2c8484889e..b54fd5bf5ba 100644 --- a/.github/workflows/test-helm-chart.yml +++ b/.github/workflows/test-helm-chart.yml @@ -124,7 +124,7 @@ jobs: for c in $(echo "$chars" | grep -o .); do title="${title//"$c"/_}" done - yq -i '.annotations."artifacthub.io/changes" += "- kind: changed\n description: '$title'\n"' helm/defectdojo/Chart.yaml + yq -i '.annotations."artifacthub.io/changes" += "- kind: changed\n description: '"$title"'\n"' helm/defectdojo/Chart.yaml git add helm/defectdojo/Chart.yaml git commit -m "ci: update Chart annotations from PR #${{ github.event.pull_request.number }}" || echo "No changes to commit" From 6ced1de80d262ad2fb47a780cff4e8cca57a03fd Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Mon, 22 Dec 2025 13:24:11 +0100 Subject: [PATCH 15/22] feat(broker): Add start-up checker (#13931) Signed-off-by: kiblik <5609770+kiblik@users.noreply.github.com> --- Dockerfile.django-alpine | 1 + Dockerfile.django-debian | 1 + Dockerfile.integration-tests-debian | 1 + docker/entrypoint-celery-beat.sh | 2 ++ docker/entrypoint-celery-worker-dev.sh | 2 ++ docker/entrypoint-celery-worker.sh | 2 ++ docker/entrypoint-uwsgi-dev.sh | 2 ++ docker/entrypoint-uwsgi.sh | 2 ++ docker/reach_broker.sh | 30 ++++++++++++++++++++++++++ 9 files changed, 43 insertions(+) create mode 100644 docker/reach_broker.sh diff --git a/Dockerfile.django-alpine b/Dockerfile.django-alpine index bcca856298a..40365930275 100644 --- a/Dockerfile.django-alpine +++ b/Dockerfile.django-alpine @@ -78,6 +78,7 @@ COPY \ docker/wait-for-it.sh \ docker/secret-file-loader.sh \ docker/reach_database.sh \ + docker/reach_broker.sh \ docker/certs/* \ / COPY wsgi.py manage.py docker/unit-tests.sh ./ diff --git a/Dockerfile.django-debian 
b/Dockerfile.django-debian index e816d204e05..eccf9bd6dae 100644 --- a/Dockerfile.django-debian +++ b/Dockerfile.django-debian @@ -81,6 +81,7 @@ COPY \ docker/wait-for-it.sh \ docker/secret-file-loader.sh \ docker/reach_database.sh \ + docker/reach_broker.sh \ docker/certs/* \ / COPY wsgi.py manage.py docker/unit-tests.sh ./ diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index 2041a086c72..0b7c1d75b1c 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -73,6 +73,7 @@ COPY --from=openapitools /opt/openapi-generator/modules/openapi-generator-cli/ta COPY docker/wait-for-it.sh \ docker/secret-file-loader.sh \ docker/reach_database.sh \ + docker/reach_broker.sh \ docker/entrypoint-integration-tests.sh \ / diff --git a/docker/entrypoint-celery-beat.sh b/docker/entrypoint-celery-beat.sh index a185493fe92..e04e0b0b9fa 100755 --- a/docker/entrypoint-celery-beat.sh +++ b/docker/entrypoint-celery-beat.sh @@ -4,6 +4,7 @@ set -e # needed to handle "exit" correctly . /secret-file-loader.sh . /reach_database.sh +. /reach_broker.sh umask 0002 @@ -23,6 +24,7 @@ if [ "$NUM_FILES" -gt 0 ]; then fi wait_for_database_to_be_reachable +wait_for_broker_to_be_reachable echo # do the check with Django stack diff --git a/docker/entrypoint-celery-worker-dev.sh b/docker/entrypoint-celery-worker-dev.sh index bd38ed028b8..70fb7c2d51e 100644 --- a/docker/entrypoint-celery-worker-dev.sh +++ b/docker/entrypoint-celery-worker-dev.sh @@ -7,8 +7,10 @@ set -e # needed to handle "exit" correctly . /secret-file-loader.sh . /reach_database.sh +. 
/reach_broker.sh wait_for_database_to_be_reachable +wait_for_broker_to_be_reachable echo if [ "${DD_CELERY_WORKER_POOL_TYPE}" = "prefork" ]; then diff --git a/docker/entrypoint-celery-worker.sh b/docker/entrypoint-celery-worker.sh index 178cc3a887c..bd173b075be 100755 --- a/docker/entrypoint-celery-worker.sh +++ b/docker/entrypoint-celery-worker.sh @@ -7,6 +7,7 @@ set -e # needed to handle "exit" correctly . /secret-file-loader.sh . /reach_database.sh +. /reach_broker.sh # Allow for bind-mount multiple settings.py overrides FILES=$(ls /app/docker/extra_settings/* 2>/dev/null || true) @@ -22,6 +23,7 @@ if [ "$NUM_FILES" -gt 0 ]; then fi wait_for_database_to_be_reachable +wait_for_broker_to_be_reachable echo if [ "${DD_CELERY_WORKER_POOL_TYPE}" = "prefork" ]; then diff --git a/docker/entrypoint-uwsgi-dev.sh b/docker/entrypoint-uwsgi-dev.sh index 45b6204f5a9..7051ccadc00 100755 --- a/docker/entrypoint-uwsgi-dev.sh +++ b/docker/entrypoint-uwsgi-dev.sh @@ -4,8 +4,10 @@ set -e # needed to handle "exit" correctly . /secret-file-loader.sh . /reach_database.sh +. /reach_broker.sh wait_for_database_to_be_reachable +wait_for_broker_to_be_reachable echo cd /app || exit diff --git a/docker/entrypoint-uwsgi.sh b/docker/entrypoint-uwsgi.sh index 0628ab3390a..a9ca7bf49e6 100755 --- a/docker/entrypoint-uwsgi.sh +++ b/docker/entrypoint-uwsgi.sh @@ -3,6 +3,7 @@ set -e # needed to handle "exit" correctly . /secret-file-loader.sh . /reach_database.sh +. 
/reach_broker.sh # Allow for bind-mount multiple settings.py overrides FILES=$(ls /app/docker/extra_settings/* 2>/dev/null || true) @@ -18,6 +19,7 @@ if [ "$NUM_FILES" -gt 0 ]; then fi wait_for_database_to_be_reachable +wait_for_broker_to_be_reachable echo umask 0002 diff --git a/docker/reach_broker.sh b/docker/reach_broker.sh new file mode 100644 index 00000000000..1fc14a1b26c --- /dev/null +++ b/docker/reach_broker.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +wait_for_broker_to_be_reachable() { + echo -n "Waiting for broker to be reachable " + failure_count=0 + DD_BROKER_READINESS_TIMEOUT=${DD_BROKER_READINESS_TIMEOUT:-10} + while true; + do + set +e + celery --app=dojo status 2>/dev/null >/dev/null + BROKER_TEST=$? + set -e + if [[ "$BROKER_TEST" == "0" ]]; then + echo "Broker test was successful. Broker and at least one worker is connected." + break + fi + if [[ "$BROKER_TEST" == "69" ]]; then + echo "Broker test was successful. Broker is up. No worker is connected (but we are not testing that here)." + break + fi + echo -n "." 
+ failure_count=$((failure_count + 1)) + if [ $DD_BROKER_READINESS_TIMEOUT = $failure_count ]; then + echo "Broker test was failed:" + # One more time with output + celery --app=dojo status + exit 1 + fi + done +} From 3688e2cf0d13a04d33cd8fd74af9f69084619852 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 22 Dec 2025 15:20:33 +0000 Subject: [PATCH 16/22] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 8 +++++--- helm/defectdojo/README.md | 2 +- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/components/package.json b/components/package.json index d9500b421b6..385f6754f56 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.54.0-dev", + "version": "2.53.4", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 41954f47d2f..894a4f111d5 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa: F401 -__version__ = "2.53.3" +__version__ = "2.53.4" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 0745888f481..515736d9964 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.54.0-dev" +appVersion: "2.53.4" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.9.4-dev +version: 1.9.4 icon: https://defectdojo.com/hubfs/DefectDojo_favicon.png maintainers: - name: madchap @@ -33,7 +33,9 @@ dependencies: # - kind: security # description: Critical bug annotations: - artifacthub.io/prerelease: "true" + artifacthub.io/prerelease: "false" artifacthub.io/changes: | - kind: fixed description: Drop 'replicas' when HPA is in place + - kind: changed + description: Bump DefectDojo to 2.53.4 diff --git a/helm/defectdojo/README.md b/helm/defectdojo/README.md index e54f00ce161..48c668d9eed 100644 --- a/helm/defectdojo/README.md +++ b/helm/defectdojo/README.md @@ -511,7 +511,7 @@ The HELM schema will be generated for you. 
# General information about chart values -![Version: 1.9.4-dev](https://img.shields.io/badge/Version-1.9.4--dev-informational?style=flat-square) ![AppVersion: 2.54.0-dev](https://img.shields.io/badge/AppVersion-2.54.0--dev-informational?style=flat-square) +![Version: 1.9.4](https://img.shields.io/badge/Version-1.9.4-informational?style=flat-square) ![AppVersion: 2.53.4](https://img.shields.io/badge/AppVersion-2.53.4-informational?style=flat-square) A Helm chart for Kubernetes to install DefectDojo From f27a01917cdc29c59d17413df88ae8b8374e7c4b Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Mon, 22 Dec 2025 17:47:58 +0100 Subject: [PATCH 17/22] github action fetch openapi spec must wait for dojo to be up --- .github/workflows/fetch-oas.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml index b74f88b4429..c0881c83396 100644 --- a/.github/workflows/fetch-oas.yml +++ b/.github/workflows/fetch-oas.yml @@ -38,6 +38,10 @@ jobs: DJANGO_VERSION: ${{ env.release_version }}-alpine NGINX_VERSION: ${{ env.release_version }}-alpine + - name: Wait for Dojo to be ready + run: | + timeout 120 bash -c 'until curl -f http://localhost:8080/api/v2/oa3/schema/; do sleep 10; done' + - name: Download OpenAPI Specifications run: |- wget 'http://localhost:8080/api/v2/oa3/schema/?format=${{ matrix.file-type }}' -O oas.${{ matrix.file-type }} --tries=10 --retry-on-http-error=502 From 5378d38721d8412b6a9021e621735d8966a1d222 Mon Sep 17 00:00:00 2001 From: valentijnscholten Date: Mon, 22 Dec 2025 18:17:19 +0100 Subject: [PATCH 18/22] also start valkey is it's now required by the entrypoitn scripts (#13960) --- .github/workflows/fetch-oas.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml index c0881c83396..e36da098b4c 100644 --- a/.github/workflows/fetch-oas.yml +++ b/.github/workflows/fetch-oas.yml @@ -33,7 +33,7 @@ jobs: docker 
images - name: Start Dojo - run: docker compose up --no-deps -d postgres nginx uwsgi + run: docker compose up --no-deps -d valkey postgres uwsgi nginx env: DJANGO_VERSION: ${{ env.release_version }}-alpine NGINX_VERSION: ${{ env.release_version }}-alpine From 5e4aaad0663013f8c4dd490b61a39d8d80458d28 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 22 Dec 2025 17:29:20 +0000 Subject: [PATCH 19/22] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 12 ++++-------- helm/defectdojo/README.md | 2 +- 4 files changed, 7 insertions(+), 11 deletions(-) diff --git a/components/package.json b/components/package.json index 385f6754f56..d9500b421b6 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.53.4", + "version": "2.54.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 894a4f111d5..7337d10b9c1 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa: F401 -__version__ = "2.53.4" +__version__ = "2.54.0-dev" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 515736d9964..119538cc717 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.53.4" +appVersion: "2.54.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.9.4 +version: 1.9.5-dev icon: https://defectdojo.com/hubfs/DefectDojo_favicon.png maintainers: - name: madchap @@ -33,9 +33,5 @@ dependencies: # - kind: security # description: Critical bug annotations: - artifacthub.io/prerelease: "false" - artifacthub.io/changes: | - - kind: fixed - description: Drop 'replicas' when HPA is in place - - kind: changed - description: Bump DefectDojo to 2.53.4 + artifacthub.io/prerelease: "true" + artifacthub.io/changes: "" diff --git a/helm/defectdojo/README.md b/helm/defectdojo/README.md index 48c668d9eed..e749100dd98 100644 --- a/helm/defectdojo/README.md +++ b/helm/defectdojo/README.md @@ -511,7 +511,7 @@ The HELM schema will be generated for you. 
# General information about chart values -![Version: 1.9.4](https://img.shields.io/badge/Version-1.9.4-informational?style=flat-square) ![AppVersion: 2.53.4](https://img.shields.io/badge/AppVersion-2.53.4-informational?style=flat-square) +![Version: 1.9.5-dev](https://img.shields.io/badge/Version-1.9.5--dev-informational?style=flat-square) ![AppVersion: 2.54.0-dev](https://img.shields.io/badge/AppVersion-2.54.0--dev-informational?style=flat-square) A Helm chart for Kubernetes to install DefectDojo From 683ce9dd87880d813a68360c56abb0d684715da3 Mon Sep 17 00:00:00 2001 From: Ross Esposito Date: Mon, 22 Dec 2025 11:41:10 -0600 Subject: [PATCH 20/22] Update Helm chart docs --- helm/defectdojo/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helm/defectdojo/README.md b/helm/defectdojo/README.md index 5751b3ce9b8..5746e30a9ad 100644 --- a/helm/defectdojo/README.md +++ b/helm/defectdojo/README.md @@ -525,7 +525,7 @@ A Helm chart for Kubernetes to install DefectDojo | Repository | Name | Version | |------------|------|---------| -| oci://registry-1.docker.io/cloudpirates | valkey | 0.10.2 | +| oci://registry-1.docker.io/cloudpirates | valkey | 0.13.0 | | oci://us-docker.pkg.dev/os-public-container-registry/defectdojo | postgresql | 16.7.27 | ## Values From 8d02cb23d031bea010fd72f572690513b7570cd1 Mon Sep 17 00:00:00 2001 From: Ross Esposito Date: Mon, 22 Dec 2025 12:09:50 -0600 Subject: [PATCH 21/22] Increasing mem for hugo --- .github/workflows/validate_docs_build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/validate_docs_build.yml b/.github/workflows/validate_docs_build.yml index 34e5ecd3f1b..525bbd3ed3b 100644 --- a/.github/workflows/validate_docs_build.yml +++ b/.github/workflows/validate_docs_build.yml @@ -38,4 +38,5 @@ jobs: env: HUGO_ENVIRONMENT: production HUGO_ENV: production + HUGO_MEMORYLIMIT: 6 run: cd docs && npm ci && hugo --minify --gc --config config/production/hugo.toml From 
f3ce35685e71e49be0984c0bc7a1a2b76884ea6d Mon Sep 17 00:00:00 2001 From: Ross Esposito Date: Mon, 22 Dec 2025 12:26:58 -0600 Subject: [PATCH 22/22] Bumping hugo version due to memory issue --- .github/workflows/validate_docs_build.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/validate_docs_build.yml b/.github/workflows/validate_docs_build.yml index 525bbd3ed3b..5ccfefbed3a 100644 --- a/.github/workflows/validate_docs_build.yml +++ b/.github/workflows/validate_docs_build.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Hugo uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3.0.0 with: - hugo-version: '0.153.0' # renovate: datasource=github-releases depName=gohugoio/hugo + hugo-version: '0.153.1' # renovate: datasource=github-releases depName=gohugoio/hugo extended: true - name: Setup Node @@ -38,5 +38,4 @@ jobs: env: HUGO_ENVIRONMENT: production HUGO_ENV: production - HUGO_MEMORYLIMIT: 6 run: cd docs && npm ci && hugo --minify --gc --config config/production/hugo.toml