diff --git a/src/sentry/api/helpers/group_index/delete.py b/src/sentry/api/helpers/group_index/delete.py
index bd1461cc31b6c2..9745f0ec7198ad 100644
--- a/src/sentry/api/helpers/group_index/delete.py
+++ b/src/sentry/api/helpers/group_index/delete.py
@@ -10,10 +10,9 @@
 from rest_framework.request import Request
 from rest_framework.response import Response
 
-from sentry import audit_log, eventstream, options
+from sentry import audit_log
 from sentry.api.base import audit_logger
 from sentry.deletions.defaults.group import GROUP_CHUNK_SIZE
-from sentry.deletions.tasks.groups import delete_groups as delete_groups_task
 from sentry.deletions.tasks.groups import delete_groups_for_project
 from sentry.issues.grouptype import GroupCategory
 from sentry.models.group import Group, GroupStatus
@@ -100,23 +99,13 @@ def delete_group_list(
     # `Group` instances that are pending deletion
     GroupInbox.objects.filter(project_id=project.id, group__id__in=group_ids).delete()
 
-    if options.get("deletions.groups.use-new-task"):
-        # Schedule a task per GROUP_CHUNK_SIZE batch of groups
-        for i in range(0, len(group_ids), GROUP_CHUNK_SIZE):
-            delete_groups_for_project.apply_async(
-                kwargs={
-                    "project_id": project.id,
-                    "object_ids": group_ids[i : i + GROUP_CHUNK_SIZE],
-                    "transaction_id": str(transaction_id),
-                }
-            )
-    else:
-        eventstream_state = eventstream.backend.start_delete_groups(project.id, group_ids)
-        delete_groups_task.apply_async(
+    # Schedule a task per GROUP_CHUNK_SIZE batch of groups
+    for i in range(0, len(group_ids), GROUP_CHUNK_SIZE):
+        delete_groups_for_project.apply_async(
             kwargs={
-                "object_ids": group_ids,
+                "project_id": project.id,
+                "object_ids": group_ids[i : i + GROUP_CHUNK_SIZE],
                 "transaction_id": str(transaction_id),
-                "eventstream_state": eventstream_state,
             }
         )
 
diff --git a/src/sentry/deletions/tasks/groups.py b/src/sentry/deletions/tasks/groups.py
index 5101cf377205aa..a2775cb51beb75 100644
--- a/src/sentry/deletions/tasks/groups.py
+++ b/src/sentry/deletions/tasks/groups.py
@@ -1,9 +1,9 @@
-from collections.abc import Mapping, Sequence
+from collections.abc import Sequence
 from typing import Any
 
 import sentry_sdk
 
-from sentry import deletions, eventstream
+from sentry import deletions
 from sentry.deletions.defaults.group import GROUP_CHUNK_SIZE
 from sentry.deletions.tasks.scheduled import MAX_RETRIES, logger
 from sentry.exceptions import DeleteAborted
@@ -16,75 +16,6 @@
 from sentry.utils import metrics
 
 
-@instrumented_task(
-    name="sentry.deletions.tasks.groups.delete_groups",
-    queue="cleanup",
-    default_retry_delay=60 * 5,
-    max_retries=MAX_RETRIES,
-    acks_late=True,
-    silo_mode=SiloMode.REGION,
-    taskworker_config=TaskworkerConfig(
-        namespace=deletion_tasks,
-        retry=Retry(
-            times=MAX_RETRIES,
-            delay=60 * 5,
-        ),
-    ),
-)
-@retry(exclude=(DeleteAborted,))
-@track_group_async_operation
-def delete_groups(
-    object_ids: Sequence[int],
-    transaction_id: str,
-    eventstream_state: Mapping[str, Any] | None = None,
-    **kwargs: Any,
-) -> None:
-    current_batch, rest = object_ids[:GROUP_CHUNK_SIZE], object_ids[GROUP_CHUNK_SIZE:]
-
-    # Select first_group from current_batch to ensure project_id tag reflects the current batch
-    first_group = Group.objects.filter(id__in=current_batch).order_by("id").first()
-    if not first_group:
-        raise DeleteAborted("delete_groups.no_group_found")
-
-    # The tags can be used if we want to find errors for when a task fails
-    sentry_sdk.set_tags(
-        {
-            "project_id": first_group.project_id,
-            "transaction_id": transaction_id,
-        },
-    )
-
-    logger.info(
-        "delete_groups.started",
-        extra={
-            "object_ids_count": len(object_ids),
-            "object_ids_current_batch": current_batch,
-            "first_id": first_group.id,
-            # These can be used when looking for logs in GCP
-            "project_id": first_group.project_id,
-            # All tasks initiated by the same request will have the same transaction_id
-            "transaction_id": transaction_id,
-        },
-    )
-
-    task = deletions.get(
-        model=Group, query={"id__in": current_batch}, transaction_id=transaction_id
-    )
-    has_more = task.chunk()
-    if has_more or rest:
-        delete_groups.apply_async(
-            kwargs={
-                "object_ids": object_ids if has_more else rest,
-                "transaction_id": transaction_id,
-                "eventstream_state": eventstream_state,
-            },
-        )
-    else:
-        # all groups have been deleted
-        if eventstream_state:
-            eventstream.backend.end_delete_groups(eventstream_state)
-
-
 @instrumented_task(
     name="sentry.deletions.tasks.groups.delete_groups_for_project",
     queue="cleanup",
@@ -142,9 +73,6 @@ def delete_groups_for_project(
         f"is greater than GROUP_CHUNK_SIZE"
     )
 
-    # This is a no-op on the Snuba side, however, one day it may not be.
-    eventstream_state = eventstream.backend.start_delete_groups(project_id, object_ids)
-
     # These can be used for debugging
     extra = {"project_id": project_id, "transaction_id": transaction_id}
     sentry_sdk.set_tags(extra)
@@ -162,6 +90,3 @@ def delete_groups_for_project(
         # Use this to query the logs
         tags={"transaction_id": transaction_id},
     )
-
-    # This will delete all Snuba events for all deleted groups
-    eventstream.backend.end_delete_groups(eventstream_state)
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 0986650d64c199..e293bc3ab66758 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -856,12 +856,7 @@
     flags=FLAG_AUTOMATOR_MODIFIABLE,
 )
 
-register(
-    "deletions.groups.use-new-task",
-    type=Bool,
-    default=False,
-    flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
+
 register(
     "issues.severity.first-event-severity-calculation-projects-allowlist",
     type=Sequence,
diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py
index a0e3231e00cc88..60c79cbbee8008 100644
--- a/tests/sentry/issues/endpoints/test_organization_group_index.py
+++ b/tests/sentry/issues/endpoints/test_organization_group_index.py
@@ -4311,78 +4311,55 @@ def assert_deleted_groups(self, groups: Sequence[Group]) -> None:
             assert not Group.objects.filter(id=group.id).exists()
             assert not GroupHash.objects.filter(group_id=group.id).exists()
 
-    @patch("sentry.eventstream.backend")
-    def test_delete_by_id(self, mock_eventstream: MagicMock) -> None:
-        eventstream_state = {"event_stream_state": str(uuid4())}
-        mock_eventstream.start_delete_groups = Mock(return_value=eventstream_state)
+    @patch("sentry.eventstream.snuba.SnubaEventStream._send")
+    @patch("sentry.eventstream.snuba.datetime")
+    def test_delete_by_id(self, mock_datetime: MagicMock, mock_send: MagicMock) -> None:
+        fixed_datetime = datetime.now()
+        mock_datetime.now.return_value = fixed_datetime
 
-        group1 = self.create_group(status=GroupStatus.RESOLVED)
-        group2 = self.create_group(status=GroupStatus.UNRESOLVED)
-        group3 = self.create_group(status=GroupStatus.IGNORED)
-        group4 = self.create_group(
-            project=self.create_project(slug="foo"),
-            status=GroupStatus.UNRESOLVED,
-        )
-
-        hashes = []
-        for g in group1, group2, group3, group4:
-            hash = uuid4().hex
-            hashes.append(hash)
-            GroupHash.objects.create(project=g.project, hash=hash, group=g)
+        groups = self.create_n_groups_with_hashes(2, project=self.project)
+        group_ids = [group.id for group in groups]
 
         self.login_as(user=self.user)
 
-        with self.feature("organizations:global-views"):
-            response = self.get_response(
-                qs_params={"id": [group1.id, group2.id], "group4": group4.id}
-            )
-
-        mock_eventstream.start_delete_groups.assert_called_once_with(
-            group1.project_id, [group1.id, group2.id]
-        )
-
-        assert response.status_code == 204
-
-        assert Group.objects.get(id=group1.id).status == GroupStatus.PENDING_DELETION
-        assert not GroupHash.objects.filter(group_id=group1.id).exists()
-
-        assert Group.objects.get(id=group2.id).status == GroupStatus.PENDING_DELETION
-        assert not GroupHash.objects.filter(group_id=group2.id).exists()
-
-        assert Group.objects.get(id=group3.id).status != GroupStatus.PENDING_DELETION
-        assert GroupHash.objects.filter(group_id=group3.id).exists()
-
-        assert Group.objects.get(id=group4.id).status != GroupStatus.PENDING_DELETION
-        assert GroupHash.objects.filter(group_id=group4.id).exists()
-
-        Group.objects.filter(id__in=(group1.id, group2.id)).update(status=GroupStatus.UNRESOLVED)
-
-        with self.tasks():
-            with self.feature("organizations:global-views"):
-                response = self.get_response(
-                    qs_params={"id": [group1.id, group2.id], "group4": group4.id}
-                )
+        with self.tasks(), self.feature("organizations:global-views"):
+            response = self.get_response(qs_params={"id": group_ids})
+            assert response.status_code == 204
 
-        # XXX(markus): Something is sending duplicated replacements to snuba --
-        # once from within tasks.deletions.groups and another time from
-        # sentry.deletions.defaults.groups
-        assert mock_eventstream.end_delete_groups.call_args_list == [
-            call(eventstream_state),
-            call(eventstream_state),
+        # Extract transaction_id from the first call
+        transaction_id = mock_send.call_args_list[0][1]["extra_data"][0]["transaction_id"]
+
+        assert mock_send.call_args_list == [
+            call(
+                self.project.id,
+                "start_delete_groups",
+                extra_data=(
+                    {
+                        "transaction_id": transaction_id,
+                        "project_id": self.project.id,
+                        "group_ids": group_ids,
+                        "datetime": json.datetime_to_str(fixed_datetime),
+                    },
+                ),
+                asynchronous=False,
+            ),
+            call(
+                self.project.id,
+                "end_delete_groups",
+                extra_data=(
+                    {
+                        "transaction_id": transaction_id,
+                        "project_id": self.project.id,
+                        "group_ids": group_ids,
+                        "datetime": json.datetime_to_str(fixed_datetime),
+                    },
+                ),
+                asynchronous=False,
+            ),
         ]
-        assert response.status_code == 204
-
-        assert not Group.objects.filter(id=group1.id).exists()
-        assert not GroupHash.objects.filter(group_id=group1.id).exists()
-
-        assert not Group.objects.filter(id=group2.id).exists()
-        assert not GroupHash.objects.filter(group_id=group2.id).exists()
-
-        assert Group.objects.filter(id=group3.id).exists()
-        assert GroupHash.objects.filter(group_id=group3.id).exists()
-
-        assert Group.objects.filter(id=group4.id).exists()
-        assert GroupHash.objects.filter(group_id=group4.id).exists()
+        for group in groups:
+            assert not Group.objects.filter(id=group.id).exists()
+            assert not GroupHash.objects.filter(group_id=group.id).exists()
 
     @patch("sentry.eventstream.backend")
     def test_delete_performance_issue_by_id(self, mock_eventstream: MagicMock) -> None:
@@ -4417,85 +4394,6 @@ def test_bulk_delete_for_many_projects_without_option(self) -> None:
         groups_1 = self.create_n_groups_with_hashes(2, project=self.project)
         groups_2 = self.create_n_groups_with_hashes(5, project=project_2)
 
-        with (
-            self.tasks(),
-            patch("sentry.deletions.tasks.groups.GROUP_CHUNK_SIZE", NEW_CHUNK_SIZE),
-            patch("sentry.deletions.tasks.groups.logger") as mock_logger,
-            patch(
-                "sentry.api.helpers.group_index.delete.uuid4",
-                side_effect=[self.get_mock_uuid("foo"), self.get_mock_uuid("bar")],
-            ),
-        ):
-            self.login_as(user=self.user)
-            response = self.get_success_response(qs_params={"query": ""})
-            assert response.status_code == 204
-            batch_1 = [g.id for g in groups_2[0:2]]
-            batch_2 = [g.id for g in groups_2[2:4]]
-            batch_3 = [g.id for g in groups_2[4:]]
-            assert batch_1 + batch_2 + batch_3 == [g.id for g in groups_2]
-
-            calls_by_project: dict[int, list[tuple[str, dict[str, Any]]]] = defaultdict(list)
-            for log_call in mock_logger.info.call_args_list:
-                calls_by_project[log_call[1]["extra"]["project_id"]].append(log_call)
-
-            assert len(calls_by_project) == 2
-            assert calls_by_project[self.project.id] == [
-                call(
-                    "delete_groups.started",
-                    extra={
-                        "object_ids_count": len(groups_1),
-                        "object_ids_current_batch": [g.id for g in groups_1],
-                        "first_id": groups_1[0].id,
-                        "project_id": self.project.id,
-                        "transaction_id": "bar",
-                    },
-                ),
-            ]
-            assert calls_by_project[project_2.id] == [
-                call(
-                    "delete_groups.started",
-                    extra={
-                        "object_ids_count": 5,
-                        "object_ids_current_batch": batch_1,
-                        "first_id": batch_1[0],
-                        "project_id": project_2.id,
-                        "transaction_id": "foo",
-                    },
-                ),
-                call(
-                    "delete_groups.started",
-                    extra={
-                        "object_ids_count": 3,
-                        "object_ids_current_batch": batch_2,
-                        "first_id": batch_2[0],
-                        "project_id": project_2.id,
-                        "transaction_id": "foo",
-                    },
-                ),
-                call(
-                    "delete_groups.started",
-                    extra={
-                        "object_ids_count": 1,
-                        "object_ids_current_batch": batch_3,
-                        "first_id": batch_3[0],
-                        "project_id": project_2.id,
-                        "transaction_id": "foo",
-                    },
-                ),
-            ]
-
-        self.assert_deleted_groups(groups_1 + groups_2)
-
-    def test_bulk_delete_for_many_projects_with_option(self) -> None:
-        NEW_CHUNK_SIZE = 2
-        with (
-            self.options({"deletions.groups.use-new-task": True}),
-            self.feature("organizations:global-views"),
-        ):
-            project_2 = self.create_project(slug="baz", organization=self.organization)
-            groups_1 = self.create_n_groups_with_hashes(2, project=self.project)
-            groups_2 = self.create_n_groups_with_hashes(5, project=project_2)
-
         with (
             self.tasks(),
             patch("sentry.api.helpers.group_index.delete.GROUP_CHUNK_SIZE", NEW_CHUNK_SIZE),
diff --git a/tests/snuba/api/endpoints/test_group_details.py b/tests/snuba/api/endpoints/test_group_details.py
index 6f6a0c0cded47d..8557209a5fb4da 100644
--- a/tests/snuba/api/endpoints/test_group_details.py
+++ b/tests/snuba/api/endpoints/test_group_details.py
@@ -326,7 +326,7 @@ def test_delete_error_issue(self) -> Any:
         url = f"/api/0/issues/{group.id}/"
 
         with patch(
-            "sentry.api.helpers.group_index.delete.delete_groups_task.apply_async"
+            "sentry.api.helpers.group_index.delete.delete_groups_for_project.apply_async"
         ) as mock_apply_async:
             response = self.client.delete(url, format="json")
             mock_apply_async.assert_called_once()
@@ -353,7 +353,7 @@ def test_delete_issue_platform_issue(self) -> Any:
         url = f"/api/0/issues/{group.id}/"
 
         with patch(
-            "sentry.api.helpers.group_index.delete.delete_groups_task.apply_async"
+            "sentry.api.helpers.group_index.delete.delete_groups_for_project.apply_async"
         ) as mock_apply_async:
             response = self.client.delete(url, format="json")
             assert response.status_code == 202
diff --git a/tests/snuba/api/endpoints/test_project_group_index.py b/tests/snuba/api/endpoints/test_project_group_index.py
index 44afe5b7d051e2..186ae4f2878772 100644
--- a/tests/snuba/api/endpoints/test_project_group_index.py
+++ b/tests/snuba/api/endpoints/test_project_group_index.py
@@ -4,7 +4,7 @@
 from collections.abc import Sequence
 from datetime import timedelta
 from functools import cached_property
-from unittest.mock import Mock, call, patch
+from unittest.mock import MagicMock, Mock, patch
 from urllib.parse import quote
 from uuid import uuid4
 
@@ -1529,55 +1529,23 @@ def assert_audit_log_entry(self, groups: Sequence[Group], mock_record_audit_log:
             assert calls[i].kwargs["event"].actor_user_id == self.user.id
             assert calls[i].kwargs["event"].data["issue_id"] == group.id
 
-    @patch("sentry.eventstream.backend")
     @patch("sentry.utils.audit.log_service.record_audit_log")
-    def test_delete_by_id(self, mock_record_audit_log, mock_eventstream):
-        eventstream_state = {"event_stream_state": uuid4().hex}
-        mock_eventstream.start_delete_groups = Mock(return_value=eventstream_state)
-
-        groups = self.create_groups(
-            [
-                (GroupStatus.RESOLVED, self.project, None),
-                (GroupStatus.UNRESOLVED, self.project, None),
-                (GroupStatus.IGNORED, self.project, None),
-                (GroupStatus.UNRESOLVED, self.create_project(slug="foo"), None),
-            ],
-        )
-        group1, group2, group3, group4 = groups
+    def test_delete_by_id(self, mock_record_audit_log: MagicMock) -> None:
+        group1, group2 = self.create_n_groups_with_hashes(2, self.project)
+        groups_to_delete = [group1, group2]
+        group3 = self.create_n_groups_with_hashes(1, project=self.create_project(slug="foo"))[0]
 
         self.login_as(user=self.user)
 
-        # Group 4 will not be deleted because it belongs to a different project
-        url = f"{self.path}?id={group1.id}&id={group2.id}&id={group4.id}"
-
-        response = self.client.delete(url, format="json")
-
-        mock_eventstream.start_delete_groups.assert_called_once_with(
-            group1.project_id, [group1.id, group2.id]
-        )
-
-        assert response.status_code == 204
-
-        self.assert_groups_being_deleted([group1, group2])
-        # Group 4 is not deleted because it belongs to a different project
-        self.assert_groups_not_deleted([group3, group4])
+        # Group 3 will not be deleted because it belongs to a different project
+        url = f"{self.path}?id={group1.id}&id={group2.id}&id={group3.id}"
 
         with self.tasks():
             response = self.client.delete(url, format="json")
+            assert response.status_code == 204
 
-        # XXX(markus): Something is sending duplicated replacements to snuba --
-        # once from within tasks.deletions.groups and another time from
-        # sentry.deletions.defaults.groups
-        assert mock_eventstream.end_delete_groups.call_args_list == [
-            call(eventstream_state),
-            call(eventstream_state),
-        ]
-
-        self.assert_audit_log_entry([group1, group2], mock_record_audit_log)
-
-        assert response.status_code == 204
-
-        self.assert_groups_are_gone([group1, group2])
-        self.assert_groups_not_deleted([group3, group4])
+        self.assert_audit_log_entry(groups_to_delete, mock_record_audit_log)
+        self.assert_groups_are_gone(groups_to_delete)
+        self.assert_groups_not_deleted([group3])
 
     @patch("sentry.eventstream.backend")
     def test_delete_performance_issue_by_id(self, mock_eventstream):
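
Note on the pattern this diff lands: `delete_group_list` now fans out one `delete_groups_for_project` task per `GROUP_CHUNK_SIZE` slice of ids, all sharing a single `transaction_id`, instead of handing the whole id list to the removed self-rescheduling `delete_groups` task. The sketch below is a minimal, self-contained illustration of that fan-out under stated assumptions; `CHUNK_SIZE`, `schedule_task`, and `fan_out_deletions` are hypothetical stand-ins for illustration, not Sentry APIs.

```python
import uuid
from collections.abc import Callable, Sequence

CHUNK_SIZE = 100  # stand-in for sentry.deletions.defaults.group.GROUP_CHUNK_SIZE


def fan_out_deletions(
    project_id: int,
    object_ids: Sequence[int],
    schedule_task: Callable[..., None],  # e.g. a Celery task's apply_async wrapper
) -> str:
    """Schedule one bounded deletion task per CHUNK_SIZE batch of ids.

    Every batch carries the same transaction_id so that all tasks spawned
    by one request can be correlated in logs, which is what the updated
    tests assert on.
    """
    transaction_id = uuid.uuid4().hex
    for i in range(0, len(object_ids), CHUNK_SIZE):
        schedule_task(
            project_id=project_id,
            object_ids=list(object_ids[i : i + CHUNK_SIZE]),
            transaction_id=transaction_id,
        )
    return transaction_id
```

Because each task now receives at most `GROUP_CHUNK_SIZE` ids for exactly one project, the recursive `apply_async` re-scheduling of the old task is unnecessary, and the duplicated `start_delete_groups`/`end_delete_groups` eventstream calls (the Snuba double-replacement flagged in the old XXX(markus) comment) can be dropped in favor of the ones issued inside `sentry.deletions.defaults.groups`, which the rewritten tests verify through `SnubaEventStream._send`.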