Skip to content

Commit 18abcf6

Browse files
authored
Ruff: Add more PLW (#10848)
1 parent 70108ed commit 18abcf6

14 files changed

Lines changed: 48 additions & 36 deletions

File tree

dojo/endpoint/views.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -325,12 +325,12 @@ def edit_meta_data(request, eid):
325325
endpoint = Endpoint.objects.get(id=eid)
326326

327327
if request.method == "POST":
328-
for key, value in request.POST.items():
328+
for key, orig_value in request.POST.items():
329329
if key.startswith("cfv_"):
330330
cfv_id = int(key.split("_")[1])
331331
cfv = get_object_or_404(DojoMeta, id=cfv_id)
332332

333-
value = value.strip()
333+
value = orig_value.strip()
334334
if value:
335335
cfv.value = value
336336
cfv.save()

dojo/forms.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2383,8 +2383,10 @@ def get_jira_issue_template_dir_choices():
23832383

23842384
for dirname in dirnames:
23852385
if base_dir.startswith(settings.TEMPLATE_DIR_PREFIX):
2386-
base_dir = base_dir[len(settings.TEMPLATE_DIR_PREFIX):]
2387-
template_dir_list.append((os.path.join(base_dir, dirname), dirname))
2386+
clean_base_dir = base_dir[len(settings.TEMPLATE_DIR_PREFIX):]
2387+
else:
2388+
clean_base_dir = base_dir
2389+
template_dir_list.append((os.path.join(clean_base_dir, dirname), dirname))
23882390

23892391
logger.debug("templates: %s", template_dir_list)
23902392
return template_dir_list

dojo/importers/default_importer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -157,9 +157,9 @@ def process_findings(
157157
logger.debug("starting import of %i parsed findings.", len(parsed_findings) if parsed_findings else 0)
158158
group_names_to_findings_dict = {}
159159

160-
for unsaved_finding in parsed_findings:
160+
for non_clean_unsaved_finding in parsed_findings:
161161
# make sure the severity is something digestible
162-
unsaved_finding = self.sanitize_severity(unsaved_finding)
162+
unsaved_finding = self.sanitize_severity(non_clean_unsaved_finding)
163163
# Filter on minimum severity if applicable
164164
if Finding.SEVERITIES[unsaved_finding.severity] > Finding.SEVERITIES[self.minimum_severity]:
165165
# finding's severity is below the configured threshold: ignoring the finding

dojo/importers/default_reimporter.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -179,9 +179,9 @@ def process_findings(
179179
logger.debug("STEP 1: looping over findings from the reimported report and trying to match them to existing findings")
180180
deduplicationLogger.debug(f"Algorithm used for matching new findings to existing findings: {self.deduplication_algorithm}")
181181

182-
for unsaved_finding in parsed_findings:
182+
for non_clean_unsaved_finding in parsed_findings:
183183
# make sure the severity is something digestible
184-
unsaved_finding = self.sanitize_severity(unsaved_finding)
184+
unsaved_finding = self.sanitize_severity(non_clean_unsaved_finding)
185185
# Filter on minimum severity if applicable
186186
if Finding.SEVERITIES[unsaved_finding.severity] > Finding.SEVERITIES[self.minimum_severity]:
187187
# finding's severity is below the configured threshold: ignoring the finding

dojo/product/views.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1240,11 +1240,11 @@ def add_meta_data(request, pid):
12401240
def edit_meta_data(request, pid):
12411241
prod = Product.objects.get(id=pid)
12421242
if request.method == "POST":
1243-
for key, value in request.POST.items():
1243+
for key, orig_value in request.POST.items():
12441244
if key.startswith("cfv_"):
12451245
cfv_id = int(key.split("_")[1])
12461246
cfv = get_object_or_404(DojoMeta, id=cfv_id)
1247-
value = value.strip()
1247+
value = orig_value.strip()
12481248
if value:
12491249
cfv.value = value
12501250
cfv.save()

dojo/search/views.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -498,15 +498,15 @@ def apply_tag_filters(qs, operators, skip_relations=False):
498498

499499
# negative search based on not- prefix (not-tags, not-test-tags, not-engagement-tags, not-product-tags, etc)
500500

501-
for tag_filter in tag_filters:
502-
tag_filter = "not-" + tag_filter
501+
for base_tag_filter in tag_filters:
502+
tag_filter = "not-" + base_tag_filter
503503
if tag_filter in operators:
504504
value = operators[tag_filter]
505505
value = ",".join(value) # contains needs a single value
506506
qs = qs.exclude(**{"{}tags__name__contains".format(tag_filters[tag_filter.replace("not-", "")]): value})
507507

508-
for tag_filter in tag_filters:
509-
tag_filter = "not-" + tag_filter
508+
for base_tag_filter in tag_filters:
509+
tag_filter = "not-" + base_tag_filter
510510
if tag_filter + "s" in operators:
511511
value = operators[tag_filter + "s"]
512512
qs = qs.exclude(**{"{}tags__name__in".format(tag_filters[tag_filter.replace("not-", "")]): value})

dojo/tools/hcl_appscan/parser.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ def get_findings(self, file, test):
102102
case "port":
103103
port = self.xmltreehelper(item)
104104
description = description + "Port:" + port + "\n"
105-
finding = Finding(
105+
prepared_finding = Finding(
106106
title=title,
107107
description=description,
108108
severity=severity,
@@ -111,11 +111,11 @@ def get_findings(self, file, test):
111111
dynamic_finding=True,
112112
static_finding=False,
113113
)
114-
findings.append(finding)
114+
findings.append(prepared_finding)
115115
try:
116-
finding.unsaved_endpoints = []
116+
prepared_finding.unsaved_endpoints = []
117117
endpoint = Endpoint(host=host, port=port)
118-
finding.unsaved_endpoints.append(endpoint)
118+
prepared_finding.unsaved_endpoints.append(endpoint)
119119
except UnboundLocalError:
120120
pass
121121
return findings

dojo/tools/intsights/parser.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def get_findings(self, file, test):
5353
raise ValueError(msg)
5454
for alert in alerts:
5555
dupe_key = alert["alert_id"]
56-
alert = Finding(
56+
uniq_alert = Finding(
5757
title=alert["title"],
5858
test=test,
5959
active=False if alert["status"] == "Closed" else True,
@@ -65,7 +65,7 @@ def get_findings(self, file, test):
6565
dynamic_finding=True,
6666
unique_id_from_tool=alert["alert_id"],
6767
)
68-
duplicates[dupe_key] = alert
68+
duplicates[dupe_key] = uniq_alert
6969
if dupe_key not in duplicates:
7070
duplicates[dupe_key] = True
7171
return list(duplicates.values())

dojo/tools/jfrog_xray_on_demand_binary_scan/parser.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -162,8 +162,8 @@ def get_item_set(vulnerability):
162162
cvss_v3 = cves[0]["cvss_v3_vector"]
163163
cvssv3 = CVSS3(cvss_v3).clean_vector()
164164

165-
for component_name, component in vulnerability.get("components", {}).items():
166-
component_name, component_version = get_component_name_version(component_name)
165+
for component_name_with_version, component in vulnerability.get("components", {}).items():
166+
component_name, component_version = get_component_name_version(component_name_with_version)
167167
mitigation, impact = process_component(component)
168168

169169
title = clean_title(vulnerability["summary"])

dojo/tools/mobsf/parser.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,8 +73,8 @@ def get_findings(self, filename, test):
7373
if "urls" in data:
7474
curl = ""
7575
for url in data["urls"]:
76-
for curl in url["urls"]:
77-
curl = f"{curl}\n"
76+
for durl in url["urls"]:
77+
curl = f"{durl}\n"
7878

7979
if curl:
8080
test_description = f"{test_description}\n**URL's:**\n {curl}\n"

0 commit comments

Comments
 (0)