Skip to content

Commit e2f468b

Browse files
authored
Merge pull request #14107 from DefectDojo/import-push-to-jira-when-not-grouped
Import/Reimport: Push to JIRA when a finding is not grouped
2 parents 0ccb6ae + 1cad7b2 commit e2f468b

19 files changed

Lines changed: 18692 additions & 8111 deletions

File tree

dojo/importers/base_importer.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -767,11 +767,13 @@ def process_finding_groups(
767767
self,
768768
finding: Finding,
769769
group_names_to_findings_dict: dict,
770-
) -> None:
770+
) -> bool:
771771
"""
772772
Determines how to handle an incoming finding with respect to grouping
773773
if finding groups are enabled, use the supplied grouping mechanism to
774-
store a reference of how the finding should be grouped
774+
store a reference of how the finding should be grouped.
775+
776+
Returns True if the finding was added to a group, False otherwise.
775777
"""
776778
if self.findings_groups_enabled and self.group_by:
777779
# If finding groups are enabled, group all findings by group name
@@ -781,6 +783,8 @@ def process_finding_groups(
781783
group_names_to_findings_dict[name].append(finding)
782784
else:
783785
group_names_to_findings_dict[name] = [finding]
786+
return True
787+
return False
784788

785789
def process_request_response_pairs(
786790
self,

dojo/importers/default_importer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ def process_findings(
222222
unsaved_finding.save_no_options()
223223

224224
# Determine how the finding should be grouped
225-
self.process_finding_groups(
225+
finding_will_be_grouped = self.process_finding_groups(
226226
finding,
227227
group_names_to_findings_dict,
228228
)
@@ -245,7 +245,7 @@ def process_findings(
245245
# all data is already saved on the finding, we only need to trigger post processing in batches
246246
logger.debug("process_findings: self.push_to_jira=%s, self.findings_groups_enabled=%s, self.group_by=%s",
247247
self.push_to_jira, self.findings_groups_enabled, self.group_by)
248-
push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by)
248+
push_to_jira = self.push_to_jira and ((not self.findings_groups_enabled or not self.group_by) or not finding_will_be_grouped)
249249
logger.debug("process_findings: computed push_to_jira=%s", push_to_jira)
250250
batch_finding_ids.append(finding.id)
251251

dojo/importers/default_reimporter.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -359,6 +359,8 @@ def process_findings(
359359
unsaved_finding,
360360
existing_finding,
361361
)
362+
# Findings that already exist cannot be moved into a group
363+
finding_will_be_grouped = False
362364
# Determine if we should skip the rest of the loop
363365
if force_continue:
364366
continue
@@ -374,7 +376,7 @@ def process_findings(
374376
self.user,
375377
)
376378
else:
377-
finding = self.process_finding_that_was_not_matched(unsaved_finding)
379+
finding, finding_will_be_grouped = self.process_finding_that_was_not_matched(unsaved_finding)
378380

379381
# Add newly created finding to candidates for subsequent findings in this batch
380382
self.add_new_finding_to_candidates(
@@ -392,7 +394,7 @@ def process_findings(
392394
unsaved_finding,
393395
)
394396
# all data is already saved on the finding, we only need to trigger post processing in batches
395-
push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by)
397+
push_to_jira = self.push_to_jira and ((not self.findings_groups_enabled or not self.group_by) or not finding_will_be_grouped)
396398
batch_finding_ids.append(finding.id)
397399

398400
# Post-processing batches (deduplication, rules, etc.) are separate from matching batches.
@@ -827,7 +829,7 @@ def process_matched_active_finding(
827829
def process_finding_that_was_not_matched(
828830
self,
829831
unsaved_finding: Finding,
830-
) -> Finding:
832+
) -> tuple[Finding, bool]:
831833
"""Create a new finding from the one parsed from the report"""
832834
# Set some explicit settings
833835
unsaved_finding.reporter = self.user
@@ -855,15 +857,15 @@ def process_finding_that_was_not_matched(
855857
f"({finding.component_name} - {finding.component_version})",
856858
)
857859
# Manage the finding grouping selection
858-
self.process_finding_groups(
860+
finding_will_be_grouped = self.process_finding_groups(
859861
unsaved_finding,
860862
self.group_names_to_findings_dict,
861863
)
862864
# Add the new finding to the list
863865
self.new_items.append(unsaved_finding)
864866
# Process any request/response pairs
865867
self.process_request_response_pairs(unsaved_finding)
866-
return unsaved_finding
868+
return unsaved_finding, finding_will_be_grouped
867869

868870
def reconcile_vulnerability_ids(
869871
self,

unittests/test_jira_import_and_pushing_api.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -185,6 +185,15 @@ def test_import_with_groups_with_push_to_jira_is_false_but_push_all(self):
185185
# by asserting full cassette is played we know issues have been updated in JIRA
186186
self.assert_cassette_played()
187187

188+
def test_import_with_group_by_with_push_all_but_no_groups_created(self):
189+
self.set_jira_push_all_issues(self.get_engagement(1))
190+
import0 = self.import_scan_with_params(self.zap_sample5_filename, group_by="component_name+component_version", verified=True)
191+
test_id = import0["test"]
192+
self.assert_jira_issue_count_in_test(test_id, 2)
193+
self.assert_jira_group_issue_count_in_test(test_id, 0)
194+
# by asserting full cassette is played we know issues have been updated in JIRA
195+
self.assert_cassette_played()
196+
188197
def test_import_no_push_to_jira_reimport_no_push_to_jira(self):
189198
import0 = self.import_scan_with_params(self.zap_sample5_filename, verified=True)
190199
test_id = import0["test"]

0 commit comments

Comments
 (0)