Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions dojo/finding/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -396,29 +396,29 @@ def add_findings_to_auto_group(name, findings, group_by, *, create_finding_group
@app.task
@dojo_model_from_id
def post_process_finding_save_signature(finding, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002
issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed
issue_updater_option=True, push_to_jira=False, alert_on_error=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed
"""
Returns a task signature for post-processing a finding. This is useful for creating task signatures
that can be used in chords or groups or to await results. We need this extra method because of our dojo_async decorator.
If we use more of these celery features, we should probably move away from that decorator.
"""
return post_process_finding_save_internal(finding, dedupe_option, rules_option, product_grading_option,
issue_updater_option, push_to_jira, user, *args, **kwargs)
issue_updater_option, push_to_jira, alert_on_error, user, *args, **kwargs)


@dojo_model_to_id
@dojo_async_task
@app.task
@dojo_model_from_id
def post_process_finding_save(finding, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002
issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed
issue_updater_option=True, push_to_jira=False, alert_on_error=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed

return post_process_finding_save_internal(finding, dedupe_option, rules_option, product_grading_option,
issue_updater_option, push_to_jira, user, *args, **kwargs)
issue_updater_option, push_to_jira, alert_on_error, user, *args, **kwargs)


def post_process_finding_save_internal(finding, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002
issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed
issue_updater_option=True, push_to_jira=False, alert_on_error=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed

if not finding:
logger.warning("post_process_finding_save called with finding==None, skipping post processing")
Expand Down Expand Up @@ -461,9 +461,9 @@ def post_process_finding_save_internal(finding, dedupe_option=True, rules_option
# based on feedback we could introduce another push_group_to_jira boolean everywhere
# but what about the push_all boolean? Let's see how this works for now and get some feedback.
if finding.has_jira_issue or not finding.finding_group:
jira_helper.push_to_jira(finding)
jira_helper.push_to_jira(finding, alert_on_error=alert_on_error)
elif finding.finding_group:
jira_helper.push_to_jira(finding.finding_group)
jira_helper.push_to_jira(finding.finding_group, alert_on_error=alert_on_error)


@dojo_async_task(signature=True)
Expand Down
41 changes: 18 additions & 23 deletions dojo/finding/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -970,8 +970,9 @@ def process_jira_form(self, request: HttpRequest, finding: Finding, context: dic
logger.debug("jform.jira_issue: %s", context["jform"].cleaned_data.get("jira_issue"))
logger.debug(JFORM_PUSH_TO_JIRA_MESSAGE, context["jform"].cleaned_data.get("push_to_jira"))
# can't use helper as when push_all_jira_issues is True, the checkbox gets disabled and is always false
push_to_jira_checkbox = context["jform"].cleaned_data.get("push_to_jira")
push_all_jira_issues = jira_helper.is_push_all_issues(finding)
push_to_jira = push_all_jira_issues or context["jform"].cleaned_data.get("push_to_jira")
push_to_jira = push_all_jira_issues or push_to_jira_checkbox or jira_helper.is_keep_in_sync_with_jira(finding)
logger.debug("push_to_jira: %s", push_to_jira)
logger.debug("push_all_jira_issues: %s", push_all_jira_issues)
logger.debug("has_jira_group_issue: %s", finding.has_jira_group_issue)
Expand All @@ -998,12 +999,6 @@ def process_jira_form(self, request: HttpRequest, finding: Finding, context: dic
jira_helper.finding_link_jira(request, finding, new_jira_issue_key)
jira_message = "Linked a JIRA issue successfully."
# any existing finding should be updated
jira_instance = jira_helper.get_jira_instance(finding)
push_to_jira = (
push_to_jira
and not (push_to_jira and finding.finding_group)
and (finding.has_jira_issue or (jira_instance and jira_instance.finding_jira_sync))
)
# Determine if a message should be added
if jira_message:
messages.add_message(
Expand Down Expand Up @@ -1052,13 +1047,13 @@ def process_forms(self, request: HttpRequest, finding: Finding, context: dict):
# do not relaunch deduplication, otherwise, it's never taken into account
if old_finding.duplicate and not new_finding.duplicate:
new_finding.duplicate_finding = None
new_finding.save(push_to_jira=push_to_jira, dedupe_option=False)
new_finding.save(push_to_jira=push_to_jira, alert_on_error=True, dedupe_option=False)
else:
new_finding.save(push_to_jira=push_to_jira)
new_finding.save(push_to_jira=push_to_jira, alert_on_error=True)
# we only push the group after storing the finding to make sure
# the updated data of the finding is pushed as part of the group
if push_to_jira and finding.finding_group:
jira_helper.push_to_jira(finding.finding_group)
jira_helper.push_to_jira(finding.finding_group, alert_on_error=True)

return request, all_forms_valid

Expand Down Expand Up @@ -1278,12 +1273,12 @@ def defect_finding_review(request, fid):
new_note.entry += "\nJira issue re-opened."
jira_helper.add_comment(finding, new_note, force_push=True)
# Save the finding
finding.save(push_to_jira=(push_to_jira and not finding_in_group))
finding.save(push_to_jira=(push_to_jira and not finding_in_group), alert_on_error=True)

# we only push the group after saving the finding to make sure
# the updated data of the finding is pushed as part of the group
if push_to_jira and finding_in_group:
jira_helper.push_to_jira(finding.finding_group)
jira_helper.push_to_jira(finding.finding_group, alert_on_error=True)

messages.add_message(
request, messages.SUCCESS, "Defect Reviewed", extra_tags="alert-success",
Expand Down Expand Up @@ -1567,12 +1562,12 @@ def request_finding_review(request, fid):
if push_to_jira and not finding_in_group:
jira_helper.add_comment(finding, new_note, force_push=True)
# Save the finding
finding.save(push_to_jira=(push_to_jira and not finding_in_group))
finding.save(push_to_jira=(push_to_jira and not finding_in_group), alert_on_error=True)

# we only push the group after saving the finding to make sure
# the updated data of the finding is pushed as part of the group
if push_to_jira and finding_in_group:
jira_helper.push_to_jira(finding.finding_group)
jira_helper.push_to_jira(finding.finding_group, alert_on_error=True)

reviewers = Dojo_User.objects.filter(id__in=form.cleaned_data["reviewers"])
reviewers_string = ", ".join([f"{user} ({user.id})" for user in reviewers])
Expand Down Expand Up @@ -1662,12 +1657,12 @@ def clear_finding_review(request, fid):
if push_to_jira and not finding_in_group:
jira_helper.add_comment(finding, new_note, force_push=True)
# Save the finding
finding.save(push_to_jira=(push_to_jira and not finding_in_group))
finding.save(push_to_jira=(push_to_jira and not finding_in_group), alert_on_error=True)

# we only push the group after saving the finding to make sure
# the updated data of the finding is pushed as part of the group
if push_to_jira and finding_in_group:
jira_helper.push_to_jira(finding.finding_group)
jira_helper.push_to_jira(finding.finding_group, alert_on_error=True)

messages.add_message(
request,
Expand Down Expand Up @@ -2033,7 +2028,7 @@ def promote_to_finding(request, fid):
new_finding, form.cleaned_data["vulnerability_ids"].split(),
)

new_finding.save(push_to_jira=push_to_jira)
new_finding.save(push_to_jira=push_to_jira, alert_on_error=True)

finding.delete()
if "githubform" in request.POST:
Expand Down Expand Up @@ -2873,12 +2868,12 @@ def finding_bulk_update_all(request, pid=None):
) = jira_helper.can_be_pushed_to_jira(group)
if not can_be_pushed_to_jira:
error_counts[error_message] += 1
jira_helper.log_jira_cannot_be_pushed_reason(error_message, group)
jira_helper.log_jira_cannot_be_pushed_reason(error_message, group, alert_on_error=True)
else:
logger.debug(
"pushing to jira from finding.finding_bulk_update_all()",
)
jira_helper.push_to_jira(group)
jira_helper.push_to_jira(group, alert_on_error=True)
success_count += 1

for error_message, error_count in error_counts.items():
Expand Down Expand Up @@ -2921,15 +2916,15 @@ def finding_bulk_update_all(request, pid=None):
"finding already pushed as part of Finding Group"
)
error_counts[error_message] += 1
jira_helper.log_jira_cannot_be_pushed_reason(error_message, finding)
jira_helper.log_jira_cannot_be_pushed_reason(error_message, finding, alert_on_error=True)
elif not can_be_pushed_to_jira:
error_counts[error_message] += 1
jira_helper.log_jira_cannot_be_pushed_reason(error_message, finding)
jira_helper.log_jira_cannot_be_pushed_reason(error_message, finding, alert_on_error=True)
else:
logger.debug(
"pushing to jira from finding.finding_bulk_update_all()",
)
jira_helper.push_to_jira(finding)
jira_helper.push_to_jira(finding, alert_on_error=True)
if note is not None and isinstance(note, Notes):
jira_helper.add_comment(finding, note)
success_count += 1
Expand Down Expand Up @@ -3218,7 +3213,7 @@ def push_to_jira(request, fid):
# but can't change too much now without having a test suite,
# so leave as is for now with the additional warning message
# to check alerts for background errors.
if jira_helper.push_to_jira(finding):
if jira_helper.push_to_jira(finding, alert_on_error=True):
messages.add_message(
request,
messages.SUCCESS,
Expand Down
2 changes: 1 addition & 1 deletion dojo/finding_group/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ def push_to_jira(request, fgid):

# it may look like success here, but push_to_jira is swallowing exceptions
# but can't change too much now without having a test suite, so leave as is for now with the additional warning message to check alerts for background errors.
if jira_helper.push_to_jira(group, sync=True):
if jira_helper.push_to_jira(group, sync=True, alert_on_error=True):
messages.add_message(
request,
messages.SUCCESS,
Expand Down
6 changes: 3 additions & 3 deletions dojo/importers/default_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,9 +284,9 @@ def process_findings(
)
if self.push_to_jira:
if findings[0].finding_group is not None:
jira_helper.push_to_jira(findings[0].finding_group)
jira_helper.push_to_jira(findings[0].finding_group, alert_on_error=True)
else:
jira_helper.push_to_jira(findings[0])
jira_helper.push_to_jira(findings[0], alert_on_error=True)
else:
logger.debug("push_to_jira is False, not pushing to JIRA")

Expand Down Expand Up @@ -377,7 +377,7 @@ def close_old_findings(
# push finding groups to jira since we only want to push whole groups
if self.findings_groups_enabled and self.push_to_jira:
for finding_group in {finding.finding_group for finding in old_findings if finding.finding_group is not None}:
jira_helper.push_to_jira(finding_group)
jira_helper.push_to_jira(finding_group, alert_on_error=True)

# Calculate grade once after all findings have been closed
if old_findings:
Expand Down
8 changes: 4 additions & 4 deletions dojo/importers/default_reimporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -483,7 +483,7 @@ def close_old_findings(
# push finding groups to jira since we only want to push whole groups
if self.findings_groups_enabled and self.push_to_jira:
for finding_group in {finding.finding_group for finding in findings if finding.finding_group is not None}:
jira_helper.push_to_jira(finding_group)
jira_helper.push_to_jira(finding_group, alert_on_error=True)

# Calculate grade once after all findings have been closed
if mitigated_findings:
Expand Down Expand Up @@ -949,17 +949,17 @@ def process_groups_for_all_findings(
)
if self.push_to_jira:
if findings[0].finding_group is not None:
jira_helper.push_to_jira(findings[0].finding_group)
jira_helper.push_to_jira(findings[0].finding_group, alert_on_error=True)
else:
jira_helper.push_to_jira(findings[0])
jira_helper.push_to_jira(findings[0], alert_on_error=True)

if self.findings_groups_enabled and self.push_to_jira:
for finding_group in {
finding.finding_group
for finding in self.reactivated_items + self.unchanged_items
if finding.finding_group is not None and not finding.is_mitigated
}:
jira_helper.push_to_jira(finding_group)
jira_helper.push_to_jira(finding_group, alert_on_error=True)

def process_results(
self,
Expand Down
27 changes: 23 additions & 4 deletions dojo/jira_link/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -609,8 +609,24 @@
obj=obj)


def log_jira_cannot_be_pushed_reason(error, obj):
"""Creates an Alert for GUI display when handling a specific (finding/group/epic) object"""
def log_jira_cannot_be_pushed_reason(error, obj, alert_on_error=False):

Check failure on line 612 in dojo/jira_link/helper.py

View workflow job for this annotation

GitHub Actions / ruff-linting

Ruff (FBT002)

dojo/jira_link/helper.py:612:50: FBT002 Boolean default positional argument in function definition
"""
Creates an Alert for GUI display when handling a specific (finding/group/epic) object.

Args:
error: Error message to display
obj: The object that cannot be pushed
alert_on_error: If True, create alerts. If False, only log. Defaults to False.

"""
if not alert_on_error:
logger.debug(
"%s cannot be pushed to JIRA (alerts disabled): %s",
to_str_typed(obj),
error,
)
return

create_notification(
event="jira_update",
title="Error pushing to JIRA " + "(" + truncate_with_dots(prod_name(obj), 25) + ")",
Expand Down Expand Up @@ -883,6 +899,9 @@


def add_jira_issue(obj, *args, **kwargs):
# Extract alert_on_error from kwargs, default to False for backward compatibility
alert_on_error = kwargs.pop("alert_on_error", False)

def failure_to_add_message(message: str, exception: Exception, _: Any) -> bool:
if exception:
logger.error("Exception occurred", exc_info=exception)
Expand Down Expand Up @@ -911,9 +930,9 @@
# not sure why this check is not part of can_be_pushed_to_jira, but afraid to change it
if isinstance(obj, Finding) and obj.duplicate and not obj.active:
logger.warning("%s will not be pushed to JIRA as it's a duplicate finding", to_str_typed(obj))
log_jira_cannot_be_pushed_reason(error_message + " and findis a duplicate", obj)
log_jira_cannot_be_pushed_reason(error_message + " and findis a duplicate", obj, alert_on_error=alert_on_error)
else:
log_jira_cannot_be_pushed_reason(error_message, obj)
log_jira_cannot_be_pushed_reason(error_message, obj, alert_on_error=alert_on_error)
logger.warning("%s cannot be pushed to JIRA: %s.", to_str_typed(obj), error_message)
logger.warning("The JIRA issue will NOT be created.")
return False
Expand Down
4 changes: 2 additions & 2 deletions dojo/middleware.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,10 +147,10 @@ class System_Settings_Manager(models.Manager):

def get_from_db(self, *args, **kwargs):
# logger.debug('refreshing system_settings from db')
from dojo.models import System_Settings # noqa: PLC0415 circular import
try:
from_db = super().get(*args, **kwargs)
except:
from dojo.models import System_Settings # noqa: PLC0415 circular import
except System_Settings.DoesNotExist:
# this mimics the existing code that was in filters.py and utils.py.
# cases I have seen triggering this is for example manage.py collectstatic inside a docker build where mysql is not available
# logger.debug('unable to get system_settings from database, constructing (new) default instance. Exception was:', exc_info=True)
Expand Down
Loading
Loading