From 78590298ea24480c58b47e424b1e0a925134620c Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Tue, 16 Dec 2025 18:06:24 +0100 Subject: [PATCH 1/4] push_to_jira: add logging --- dojo/api_v2/serializers.py | 2 ++ dojo/api_v2/views.py | 2 +- dojo/finding/helper.py | 9 +++++++++ dojo/importers/default_importer.py | 2 ++ 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 2f884b3bb4a..4cada0d1dc6 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -2281,6 +2281,7 @@ def process_scan( Raises exceptions in the event of an error """ try: + logger.debug(f"process_scan called with context: {context}") start_time = time.perf_counter() importer = self.get_importer(**context) context["test"], _, _, _, _, _, _ = importer.process_scan( @@ -2558,6 +2559,7 @@ def process_scan( """ statistics_before, statistics_delta = None, None try: + logger.debug(f"process_scan called with context: {context}") start_time = time.perf_counter() if test := context.get("test"): statistics_before = test.statistics diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index bdde57955f2..b321c35d558 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -2515,7 +2515,7 @@ def perform_create(self, serializer): jira_driver = engagement or (product or None) if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None): push_to_jira = push_to_jira or jira_project.push_all_issues - # logger.debug(f"push_to_jira: {push_to_jira}") + serializer.save(push_to_jira=push_to_jira) def get_queryset(self): diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index 19bf9ee6d99..d90febe6ea0 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -479,12 +479,19 @@ def post_process_findings_batch_signature(finding_ids, *args, dedupe_option=True def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True, 
issue_updater_option=True, push_to_jira=False, user=None, **kwargs): + logger.debug( + f"post_process_findings_batch called: finding_ids_count={len(finding_ids) if finding_ids else 0}, " + f"args={args}, dedupe_option={dedupe_option}, rules_option={rules_option}, " + f"product_grading_option={product_grading_option}, issue_updater_option={issue_updater_option}, " + f"push_to_jira={push_to_jira}, user={user.id if user else None}, kwargs={kwargs}", + ) if not finding_ids: return system_settings = System_Settings.objects.get() # use list() to force a complete query execution and related objects to be loaded once + logger.debug(f"getting finding models for batch deduplication with: {len(finding_ids)} findings") findings = get_finding_models_for_deduplication(finding_ids) if not findings: @@ -517,6 +524,8 @@ def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_op jira_helper.push_to_jira(finding) else: jira_helper.push_to_jira(finding.finding_group) + else: + logger.debug("push_to_jira is False, not pushing to JIRA") @receiver(pre_delete, sender=Finding) diff --git a/dojo/importers/default_importer.py b/dojo/importers/default_importer.py index 63f41b8f744..17401b0168d 100644 --- a/dojo/importers/default_importer.py +++ b/dojo/importers/default_importer.py @@ -279,6 +279,8 @@ def process_findings( jira_helper.push_to_jira(findings[0].finding_group) else: jira_helper.push_to_jira(findings[0]) + else: + logger.debug("push_to_jira is False, not pushing to JIRA") # Note: All chord batching is now handled within the loop above From 7722de8c1839c1ae4a059e8669104fa74a5c220d Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Tue, 16 Dec 2025 18:45:30 +0100 Subject: [PATCH 2/4] push_to_jira: add logging --- dojo/finding/deduplication.py | 2 ++ dojo/finding/helper.py | 1 + 2 files changed, 3 insertions(+) diff --git a/dojo/finding/deduplication.py b/dojo/finding/deduplication.py index 7297e55fef4..d11c66b17ba 100644 --- a/dojo/finding/deduplication.py
+++ b/dojo/finding/deduplication.py @@ -27,6 +27,7 @@ def get_finding_models_for_deduplication(finding_ids): """ if not finding_ids: + logger.debug("get_finding_models_for_deduplication called with no finding_ids") return [] return list( @@ -543,6 +544,7 @@ def dedupe_batch_of_findings(findings, *args, **kwargs): return batch_dedupe_method(findings, *args, **kwargs) if not findings: + logger.debug("dedupe_batch_of_findings called with no findings") return None enabled = System_Settings.objects.get().enable_deduplication diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index d90febe6ea0..e3c85666c77 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -493,6 +493,7 @@ def post_process_findings_batch(finding_ids, *args, dedupe_option=True, rules_op # use list() to force a complete query execution and related objects to be loaded once logger.debug(f"getting finding models for batch deduplication with: {len(finding_ids)} findings") findings = get_finding_models_for_deduplication(finding_ids) + logger.debug(f"found {len(findings)} findings for batch deduplication") if not findings: logger.debug(f"no findings found for batch deduplication with IDs: {finding_ids}") From d18feb463e0bfb64ce6b7fac35f0b67f2c19da3e Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Wed, 17 Dec 2025 17:43:24 +0100 Subject: [PATCH 3/4] push to jira: fix passing of parameters in async mode --- dojo/engagement/views.py | 24 ++++++++++++++++++++++++ dojo/finding/helper.py | 4 ++-- dojo/importers/default_importer.py | 12 ++++++++++-- dojo/test/views.py | 24 ++++++++++++++++++++++++ 4 files changed, 60 insertions(+), 4 deletions(-) diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index 706210b569c..a726f514421 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -936,6 +936,30 @@ def import_findings( ) -> str | None: """Attempt to import with all the supplied information""" try: + # Log only user-entered form values, excluding 
internal objects + user_values = { + "scan_type": context.get("scan_type"), + "scan_date": context.get("scan_date"), + "minimum_severity": context.get("minimum_severity"), + "active": context.get("active"), + "verified": context.get("verified"), + "test_title": context.get("test_title"), + "tags": context.get("tags"), + "version": context.get("version"), + "branch_tag": context.get("branch_tag"), + "build_id": context.get("build_id"), + "commit_hash": context.get("commit_hash"), + "service": context.get("service"), + "close_old_findings": context.get("close_old_findings"), + "apply_tags_to_findings": context.get("apply_tags_to_findings"), + "apply_tags_to_endpoints": context.get("apply_tags_to_endpoints"), + "close_old_findings_product_scope": context.get("close_old_findings_product_scope"), + "group_by": context.get("group_by"), + "create_finding_groups_for_all_findings": context.get("create_finding_groups_for_all_findings"), + "push_to_jira": context.get("push_to_jira"), + "push_all_jira_issues": context.get("push_all_jira_issues"), + } + logger.debug(f"import_findings called with user values: {user_values}") importer_client = self.get_importer(context) context["test"], _, finding_count, closed_finding_count, _, _, _ = importer_client.process_scan( context.pop("scan", None), diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index e3c85666c77..1fcf0726957 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -470,8 +470,8 @@ def post_process_finding_save_internal(finding, dedupe_option=True, rules_option @app.task def post_process_findings_batch_signature(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True, issue_updater_option=True, push_to_jira=False, user=None, **kwargs): - return post_process_findings_batch(finding_ids, dedupe_option, rules_option, product_grading_option, - issue_updater_option, push_to_jira, user, **kwargs) + # Pass arguments as keyword arguments to ensure Celery properly serializes 
them + return post_process_findings_batch(finding_ids, *args, dedupe_option=dedupe_option, rules_option=rules_option, product_grading_option=product_grading_option, issue_updater_option=issue_updater_option, push_to_jira=push_to_jira, user=user, **kwargs) @dojo_async_task diff --git a/dojo/importers/default_importer.py b/dojo/importers/default_importer.py index 17401b0168d..3030e7f4e4e 100644 --- a/dojo/importers/default_importer.py +++ b/dojo/importers/default_importer.py @@ -238,22 +238,30 @@ def process_findings( # Categorize this finding as a new one new_findings.append(finding) # all data is already saved on the finding, we only need to trigger post processing in batches + logger.debug("process_findings: self.push_to_jira=%s, self.findings_groups_enabled=%s, self.group_by=%s", + self.push_to_jira, self.findings_groups_enabled, self.group_by) push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by) + logger.debug("process_findings: computed push_to_jira=%s", push_to_jira) batch_finding_ids.append(finding.id) # If batch is full or we're at the end, dispatch one batched task if len(batch_finding_ids) >= batch_max_size or is_final_finding: finding_ids_batch = list(batch_finding_ids) batch_finding_ids.clear() + logger.debug("process_findings: dispatching batch with push_to_jira=%s (batch_size=%d, is_final=%s)", + push_to_jira, len(finding_ids_batch), is_final_finding) if we_want_async(async_user=self.user): - finding_helper.post_process_findings_batch_signature( + signature = finding_helper.post_process_findings_batch_signature( finding_ids_batch, dedupe_option=True, rules_option=True, product_grading_option=True, issue_updater_option=True, push_to_jira=push_to_jira, - )() + ) + logger.debug("process_findings: signature created with push_to_jira=%s, signature.kwargs=%s", + push_to_jira, signature.kwargs) + signature() else: finding_helper.post_process_findings_batch( finding_ids_batch, diff --git a/dojo/test/views.py 
b/dojo/test/views.py index 5db820d6c3a..d2bf11092e7 100644 --- a/dojo/test/views.py +++ b/dojo/test/views.py @@ -964,6 +964,30 @@ def reimport_findings( ) -> str | None: """Attempt to import with all the supplied information""" try: + # Log only user-entered form values, excluding internal objects + user_values = { + "scan_type": context.get("scan_type"), + "scan_date": context.get("scan_date"), + "minimum_severity": context.get("minimum_severity"), + "active": context.get("active"), + "verified": context.get("verified"), + "tags": context.get("tags"), + "version": context.get("version"), + "branch_tag": context.get("branch_tag"), + "build_id": context.get("build_id"), + "commit_hash": context.get("commit_hash"), + "service": context.get("service"), + "close_old_findings": context.get("close_old_findings"), + "apply_tags_to_findings": context.get("apply_tags_to_findings"), + "apply_tags_to_endpoints": context.get("apply_tags_to_endpoints"), + "close_old_findings_product_scope": context.get("close_old_findings_product_scope"), + "group_by": context.get("group_by"), + "create_finding_groups_for_all_findings": context.get("create_finding_groups_for_all_findings"), + "push_to_jira": context.get("push_to_jira"), + "push_all_jira_issues": context.get("push_all_jira_issues"), + "do_not_reactivate": context.get("do_not_reactivate"), + } + logger.debug(f"reimport_findings called with user values: {user_values}") importer_client = self.get_reimporter(context) ( context["test"], From ec2c7642d0c80a0cad8970e14c9a5650e359f3a5 Mon Sep 17 00:00:00 2001 From: Valentijn Scholten Date: Wed, 17 Dec 2025 17:52:24 +0100 Subject: [PATCH 4/4] push to jira: fix passing of parameters in async mode --- dojo/finding/helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index 1fcf0726957..a0cf29f2120 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -470,8 +470,8 @@ def 
post_process_finding_save_internal(finding, dedupe_option=True, rules_option @app.task def post_process_findings_batch_signature(finding_ids, *args, dedupe_option=True, rules_option=True, product_grading_option=True, issue_updater_option=True, push_to_jira=False, user=None, **kwargs): - # Pass arguments as keyword arguments to ensure Celery properly serializes them return post_process_findings_batch(finding_ids, *args, dedupe_option=dedupe_option, rules_option=rules_option, product_grading_option=product_grading_option, issue_updater_option=issue_updater_option, push_to_jira=push_to_jira, user=user, **kwargs) + # Pass arguments as keyword arguments to ensure Celery properly serializes them @dojo_async_task