From ae6dd8677778c7efd630872cf809cf750a4babf4 Mon Sep 17 00:00:00 2001
From: "seer-by-sentry[bot]" <157164994+seer-by-sentry[bot]@users.noreply.github.com>
Date: Tue, 11 Nov 2025 18:18:37 +0000
Subject: [PATCH 1/3] fix: Batch commit file change queries to avoid timeouts

---
 src/sentry/utils/committers.py | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/src/sentry/utils/committers.py b/src/sentry/utils/committers.py
index afdb025ba46dc6..f9bbe01af2373c 100644
--- a/src/sentry/utils/committers.py
+++ b/src/sentry/utils/committers.py
@@ -26,6 +26,9 @@ from sentry.utils.hashlib import hash_values
 
 PATH_SEPARATORS = frozenset(["/", "\\"])
 
+# Limit the number of commits to batch in a single query to avoid query timeouts
+# from large IN clauses combined with complex LIKE conditions
+COMMIT_BATCH_SIZE = 50
 
 
 def tokenize_path(path: str) -> Iterator[str]:
@@ -96,11 +99,19 @@ def _get_commit_file_changes(
     # build a single query to get all of the commit file that might match the first n frames
     path_query = reduce(operator.or_, (Q(filename__iendswith=path) for path in filenames))
 
-    commit_file_change_matches = CommitFileChange.objects.filter(
-        path_query, commit_id__in=[c.id for c in commits]
-    )
+    # Batch commits to avoid query timeouts from large IN clauses
+    # combined with complex LIKE conditions
+    all_file_changes: list[CommitFileChange] = []
+    commit_ids = [c.id for c in commits]
+
+    for i in range(0, len(commit_ids), COMMIT_BATCH_SIZE):
+        batch_commit_ids = commit_ids[i:i + COMMIT_BATCH_SIZE]
+        commit_file_change_matches = CommitFileChange.objects.filter(
+            path_query, commit_id__in=batch_commit_ids
+        )
+        all_file_changes.extend(list(commit_file_change_matches))
 
-    return list(commit_file_change_matches)
+    return all_file_changes
 
 
 def _match_commits_paths(

From 8b3d8dc951db8b5e012e1cef4bf892aee383dfd8 Mon Sep 17 00:00:00 2001
From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com>
Date: Tue, 11 Nov 2025 18:19:39 +0000
Subject: [PATCH 2/3] :hammer_and_wrench: apply pre-commit fixes

---
 src/sentry/utils/committers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sentry/utils/committers.py b/src/sentry/utils/committers.py
index f9bbe01af2373c..c6ca2ad40c9dd4 100644
--- a/src/sentry/utils/committers.py
+++ b/src/sentry/utils/committers.py
@@ -103,9 +103,9 @@ def _get_commit_file_changes(
     # combined with complex LIKE conditions
     all_file_changes: list[CommitFileChange] = []
     commit_ids = [c.id for c in commits]
-    
+
     for i in range(0, len(commit_ids), COMMIT_BATCH_SIZE):
-        batch_commit_ids = commit_ids[i:i + COMMIT_BATCH_SIZE]
+        batch_commit_ids = commit_ids[i : i + COMMIT_BATCH_SIZE]
         commit_file_change_matches = CommitFileChange.objects.filter(
             path_query, commit_id__in=batch_commit_ids
         )
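
Patch 3 below replaces the manual index arithmetic with Sentry's `chunked`
helper from `sentry.utils.iterators`. As a minimal sketch of the behavior the
refactor relies on (a stand-in for illustration only, not the helper's actual
implementation):

    # Stand-in sketch of chunked(); the real helper lives in
    # sentry.utils.iterators and is imported by the patch below.
    from collections.abc import Iterator, Sequence
    from typing import TypeVar

    T = TypeVar("T")

    def chunked(seq: Sequence[T], size: int) -> Iterator[list[T]]:
        # Yield successive batches of at most `size` items.
        for i in range(0, len(seq), size):
            yield list(seq[i : i + size])

    # With COMMIT_BATCH_SIZE = 50, 120 commit ids split into batches of
    # 50, 50, and 20, so each query's IN clause stays bounded.
    assert [len(batch) for batch in chunked(list(range(120)), 50)] == [50, 50, 20]
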
From 46ae8f486637e167bb1b4e2b226d51a39ed5e0dd Mon Sep 17 00:00:00 2001
From: Yuval Mandelboum
Date: Tue, 11 Nov 2025 16:59:39 -0800
Subject: [PATCH 3/3] refactor to use `chunked` for batching

---
 src/sentry/utils/committers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sentry/utils/committers.py b/src/sentry/utils/committers.py
index c6ca2ad40c9dd4..7ac7b3d8b4c687 100644
--- a/src/sentry/utils/committers.py
+++ b/src/sentry/utils/committers.py
@@ -24,6 +24,7 @@ from sentry.users.services.user.service import user_service
 from sentry.utils.event_frames import find_stack_frames, munged_filename_and_frames
 from sentry.utils.hashlib import hash_values
+from sentry.utils.iterators import chunked
 
 PATH_SEPARATORS = frozenset(["/", "\\"])
 
 # Limit the number of commits to batch in a single query to avoid query timeouts
@@ -104,8 +105,7 @@ def _get_commit_file_changes(
     all_file_changes: list[CommitFileChange] = []
     commit_ids = [c.id for c in commits]
 
-    for i in range(0, len(commit_ids), COMMIT_BATCH_SIZE):
-        batch_commit_ids = commit_ids[i : i + COMMIT_BATCH_SIZE]
+    for batch_commit_ids in chunked(commit_ids, COMMIT_BATCH_SIZE):
         commit_file_change_matches = CommitFileChange.objects.filter(
             path_query, commit_id__in=batch_commit_ids
         )
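
Taken together, the series trades one potentially unbounded query (an IN
clause over every commit id, AND'd with the OR of one case-insensitive LIKE
per candidate filename) for a bounded number of small ones. A back-of-the-
envelope check, using a hypothetical workload of 5,000 commits (the count is
chosen purely for illustration):

    # Hypothetical workload; only COMMIT_BATCH_SIZE = 50 comes from the patch.
    import math

    COMMIT_BATCH_SIZE = 50
    n_commits = 5_000
    # One query per batch, each with at most COMMIT_BATCH_SIZE ids in its
    # IN clause.
    print(math.ceil(n_commits / COMMIT_BATCH_SIZE))  # -> 100

Each of those queries carries at most 50 ids alongside the same OR'd LIKE
conditions, which is intended to keep the combined predicate cheap enough to
avoid the statement timeouts the first patch describes.
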