From 455bbca9d5fd82057e21d0b407f3e77e28fa648b Mon Sep 17 00:00:00 2001 From: Ogi Date: Mon, 23 Mar 2026 12:24:30 +0100 Subject: [PATCH 01/16] fix(ai-insights): sort conversation spans by start timestamp (#111185) Sort spans by `start_timestamp` instead of `end_timestamp` in the conversation hook so the AI Spans list displays spans in the order they began. The Messages panel is unaffected because `extractMessagesFromNodes` applies its own internal sort by `end_timestamp`. --------- Co-authored-by: Claude Opus 4.6 --- .../hooks/useConversation.spec.tsx | 50 +++++++++++++++++++ .../conversations/hooks/useConversation.tsx | 2 +- .../utils/conversationMessages.spec.ts | 21 ++++++-- 3 files changed, 68 insertions(+), 5 deletions(-) diff --git a/static/app/views/insights/pages/conversations/hooks/useConversation.spec.tsx b/static/app/views/insights/pages/conversations/hooks/useConversation.spec.tsx index 6149889d610c33..4d835c2367de18 100644 --- a/static/app/views/insights/pages/conversations/hooks/useConversation.spec.tsx +++ b/static/app/views/insights/pages/conversations/hooks/useConversation.spec.tsx @@ -316,6 +316,56 @@ describe('useConversation', () => { expect(value?.name).toBe('AI generation'); }); + it('sorts nodes by start timestamp for AI spans list', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/ai-conversations/conv-sort/`, + body: [ + { + 'gen_ai.conversation.id': 'conv-sort', + parent_span: 'parent-1', + 'precise.finish_ts': 1002.0, + 'precise.start_ts': 1001.0, + project: 'test-project', + 'project.id': 1, + 'span.description': 'Second by start, first by end', + 'span.op': 'gen_ai.generate', + 'span.status': 'ok', + span_id: 'span-b', + trace: 'trace-sort', + 'gen_ai.operation.type': 'ai_client', + }, + { + 'gen_ai.conversation.id': 'conv-sort', + parent_span: 'parent-1', + 'precise.finish_ts': 1003.0, + 'precise.start_ts': 1000.0, + project: 'test-project', + 'project.id': 1, + 'span.description': 'First by 
start, second by end', + 'span.op': 'gen_ai.generate', + 'span.status': 'ok', + span_id: 'span-a', + trace: 'trace-sort', + 'gen_ai.operation.type': 'ai_client', + }, + ], + }); + + const {result} = renderHookWithProviders( + () => useConversation({conversationId: 'conv-sort'}), + {organization} + ); + + await waitFor(() => { + expect(result.current.isLoading).toBe(false); + }); + + expect(result.current.nodes).toHaveLength(2); + // Sorted by start timestamp: span-a (1000) before span-b (1001) + expect(result.current.nodes[0]?.id).toBe('span-a'); + expect(result.current.nodes[1]?.id).toBe('span-b'); + }); + it('filters to only gen_ai spans', async () => { MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/ai-conversations/conv-filter/`, diff --git a/static/app/views/insights/pages/conversations/hooks/useConversation.tsx b/static/app/views/insights/pages/conversations/hooks/useConversation.tsx index 390aa3d67e0b68..c12d5616e7d550 100644 --- a/static/app/views/insights/pages/conversations/hooks/useConversation.tsx +++ b/static/app/views/insights/pages/conversations/hooks/useConversation.tsx @@ -229,7 +229,7 @@ export function useConversation( return node; }); - transformedNodes.sort((a, b) => (a.endTimestamp ?? 0) - (b.endTimestamp ?? 0)); + transformedNodes.sort((a, b) => (a.startTimestamp ?? 0) - (b.startTimestamp ?? 
0)); return {nodes: transformedNodes, nodeTraceMap: traceMap}; }, [conversationQuery.data]); diff --git a/static/app/views/insights/pages/conversations/utils/conversationMessages.spec.ts b/static/app/views/insights/pages/conversations/utils/conversationMessages.spec.ts index 645d72f97729a9..354df3eae35c98 100644 --- a/static/app/views/insights/pages/conversations/utils/conversationMessages.spec.ts +++ b/static/app/views/insights/pages/conversations/utils/conversationMessages.spec.ts @@ -322,23 +322,36 @@ describe('conversationMessages utilities', () => { expect(result.toolSpans[0]?.id).toBe('tool-1'); }); - it('sorts by timestamp', () => { - const gen1 = createMockNode({id: 'gen-1', startTimestamp: 2000}); - const gen2 = createMockNode({id: 'gen-2', startTimestamp: 1000}); + it('sorts by end timestamp, not start timestamp', () => { + // gen-1 starts later but ends first + const gen1 = createMockNode({ + id: 'gen-1', + startTimestamp: 2000, + endTimestamp: 2100, + }); + // gen-2 starts earlier but ends later + const gen2 = createMockNode({ + id: 'gen-2', + startTimestamp: 1000, + endTimestamp: 3000, + }); const tool1 = createMockToolNode({ id: 'tool-1', toolName: 'a', startTimestamp: 3000, + endTimestamp: 3500, }); const tool2 = createMockToolNode({ id: 'tool-2', toolName: 'b', startTimestamp: 1500, + endTimestamp: 1600, }); const result = partitionSpansByType([gen1, gen2, tool1, tool2] as any); - expect(result.generationSpans.map(s => s.id)).toEqual(['gen-2', 'gen-1']); + // Sorted by end_timestamp: gen-1 (2100) before gen-2 (3000) + expect(result.generationSpans.map(s => s.id)).toEqual(['gen-1', 'gen-2']); expect(result.toolSpans.map(s => s.id)).toEqual(['tool-2', 'tool-1']); }); From e434a0f5b343c261c3fe331b4df34287a12d3a0f Mon Sep 17 00:00:00 2001 From: Ogi Date: Mon, 23 Mar 2026 12:54:20 +0100 Subject: [PATCH 02/16] fix(ai-insights): token input/output count (#111284) --- .../details/highlightedAttributes.tsx | 62 +++++++++++++++++-- 1 file changed, 56 
insertions(+), 6 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/highlightedAttributes.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/highlightedAttributes.tsx index 94c8aa39c3550c..aad22481b0ec7e 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/highlightedAttributes.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/highlightedAttributes.tsx @@ -355,6 +355,42 @@ function HighlightedTools({ ); } +// Per our and OTel conventions, input_tokens includes cached and output_tokens includes +// reasoning. Some providers don't do this, so we detect the gap and adjust as a fallback. +function getDisplayInputTokens( + inputTokens: number, + cachedTokens: number, + outputTokens: number, + totalTokens: number +): number { + if (cachedTokens <= 0) { + return inputTokens; + } + const without = inputTokens + outputTokens; + const withCached = without + cachedTokens; + if (Math.abs(withCached - totalTokens) < Math.abs(without - totalTokens)) { + return inputTokens + cachedTokens; + } + return inputTokens; +} + +function getDisplayOutputTokens( + displayInput: number, + outputTokens: number, + reasoningTokens: number, + totalTokens: number +): number { + if (reasoningTokens <= 0) { + return outputTokens; + } + const without = displayInput + outputTokens; + const withReasoning = without + reasoningTokens; + if (Math.abs(withReasoning - totalTokens) < Math.abs(without - totalTokens)) { + return outputTokens + reasoningTokens; + } + return outputTokens; +} + function HighlightedTokenAttributes({ inputTokens, cachedTokens, @@ -368,6 +404,22 @@ function HighlightedTokenAttributes({ reasoningTokens: number; totalTokens: number; }) { + const effectiveCached = isNaN(cachedTokens) ? 0 : cachedTokens; + const effectiveReasoning = isNaN(reasoningTokens) ? 
0 : reasoningTokens; + + const displayInput = getDisplayInputTokens( + inputTokens, + effectiveCached, + outputTokens, + totalTokens + ); + const displayOutput = getDisplayOutputTokens( + displayInput, + outputTokens, + effectiveReasoning, + totalTokens + ); + return ( {t('Input')} {inputTokens.toString()} {t('Cached')} - {isNaN(cachedTokens) ? '0' : cachedTokens.toString()} + {effectiveCached.toString()} {t('Output')} {outputTokens.toString()} {t('Reasoning')} - - {isNaN(reasoningTokens) ? '0' : reasoningTokens.toString()} - + {effectiveReasoning.toString()} {t('Total')} {totalTokens.toString()} @@ -389,11 +439,11 @@ function HighlightedTokenAttributes({ > - {t('in')} + {t('in')} + - {t('out')} + {t('out')} = From 0e064c1705df3a51033f409a23c71dc0a93e91a1 Mon Sep 17 00:00:00 2001 From: Colton Allen Date: Mon, 23 Mar 2026 08:30:02 -0500 Subject: [PATCH 03/16] feat(scm): Move static facade to module level and dynamically construct the SCM type from the provider's capabilities (#111113) - API is compatible so minimal test coverage changes. - The static facade really helped in keeping the test coverage simple. - The static facade seems to have helped in the RPC too. - Catching AttributeErrors and re-routing to the client to handle. This is a little broad ATM. I'd like to only catch attribute errors triggered by provider/action mismatches. 
--- .github/CODEOWNERS | 1 + .github/codeowners-coverage-baseline.txt | 16 - src/sentry/scm/actions.py | 1126 ++++++++++---------- src/sentry/scm/private/facade.py | 87 ++ src/sentry/scm/private/provider.py | 105 +- src/sentry/scm/private/rpc.py | 153 ++- tests/sentry/scm/endpoints/test_scm_rpc.py | 27 + tests/sentry/scm/unit/test_scm_actions.py | 330 +++--- 8 files changed, 983 insertions(+), 862 deletions(-) create mode 100644 src/sentry/scm/private/facade.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7b403b5175646b..f52324813ffeb7 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -818,6 +818,7 @@ tests/sentry/api/endpoints/test_organization_attribute_mappings.py @get /tests/sentry/releases/ @getsentry/coding-workflows-sentry-backend ## SCM +/src/sentry/scm/ @getsentry/scm /src/sentry/integrations/source_code_management/ @getsentry/product-owners-settings-integrations @getsentry/ecosystem @getsentry/scm /src/sentry/integrations/repository/ @getsentry/product-owners-settings-integrations @getsentry/ecosystem @getsentry/scm /src/sentry/integrations/github_enterprise/ @getsentry/product-owners-settings-integrations @getsentry/ecosystem @getsentry/scm diff --git a/.github/codeowners-coverage-baseline.txt b/.github/codeowners-coverage-baseline.txt index 6452cdf1bb5971..19fa98f658e53a 100644 --- a/.github/codeowners-coverage-baseline.txt +++ b/.github/codeowners-coverage-baseline.txt @@ -236,22 +236,6 @@ src/sentry/runner/importer.py src/sentry/runner/initializer.py src/sentry/runner/main.py src/sentry/runner/settings.py -src/sentry/scm/actions.py -src/sentry/scm/apps.py -src/sentry/scm/endpoints/scm_rpc.py -src/sentry/scm/errors.py -src/sentry/scm/private/event_stream.py -src/sentry/scm/private/helpers.py -src/sentry/scm/private/ipc.py -src/sentry/scm/private/provider.py -src/sentry/scm/private/providers/github.py -src/sentry/scm/private/providers/gitlab.py -src/sentry/scm/private/rpc.py -src/sentry/scm/private/stream_producer.py 
-src/sentry/scm/private/webhooks/github.py -src/sentry/scm/stream.py -src/sentry/scm/types.py -src/sentry/scm/utils.py src/sentry/scripts/alerts/conditional_zrem.lua src/sentry/scripts/digests/digests.lua src/sentry/scripts/quotas/is_rate_limited.lua diff --git a/src/sentry/scm/actions.py b/src/sentry/scm/actions.py index df23078197b0cc..27efa7c8038a09 100644 --- a/src/sentry/scm/actions.py +++ b/src/sentry/scm/actions.py @@ -4,9 +4,8 @@ from sentry.integrations.models.integration import Integration from sentry.integrations.services.integration.model import RpcIntegration from sentry.models.repository import Repository as RepositoryModel -from sentry.scm.errors import SCMProviderNotSupported +from sentry.scm.private.facade import Facade from sentry.scm.private.helpers import ( - exec_provider_fn, fetch_repository, fetch_service_provider, initialize_provider, @@ -14,7 +13,57 @@ map_repository_model_to_repository, ) from sentry.scm.private.ipc import record_count_metric -from sentry.scm.private.provider import ActionMap, Provider +from sentry.scm.private.provider import ( + CompareCommitsProtocol, + CreateBranchProtocol, + CreateCheckRunProtocol, + CreateGitBlobProtocol, + CreateGitCommitProtocol, + CreateGitTreeProtocol, + CreateIssueCommentProtocol, + CreateIssueCommentReactionProtocol, + CreateIssueReactionProtocol, + CreatePullRequestCommentProtocol, + CreatePullRequestCommentReactionProtocol, + CreatePullRequestDraftProtocol, + CreatePullRequestProtocol, + CreatePullRequestReactionProtocol, + CreateReviewCommentFileProtocol, + CreateReviewCommentReplyProtocol, + CreateReviewProtocol, + DeleteIssueCommentProtocol, + DeleteIssueCommentReactionProtocol, + DeleteIssueReactionProtocol, + DeletePullRequestCommentProtocol, + DeletePullRequestCommentReactionProtocol, + DeletePullRequestReactionProtocol, + GetArchiveLinkProtocol, + GetBranchProtocol, + GetCheckRunProtocol, + GetCommitProtocol, + GetCommitsByPathProtocol, + GetCommitsProtocol, + GetFileContentProtocol, + 
GetGitCommitProtocol, + GetIssueCommentReactionsProtocol, + GetIssueCommentsProtocol, + GetIssueReactionsProtocol, + GetPullRequestCommentReactionsProtocol, + GetPullRequestCommentsProtocol, + GetPullRequestCommitsProtocol, + GetPullRequestDiffProtocol, + GetPullRequestFilesProtocol, + GetPullRequestProtocol, + GetPullRequestReactionsProtocol, + GetPullRequestsProtocol, + GetTreeProtocol, + MinimizeCommentProtocol, + Provider, + RequestReviewProtocol, + UpdateBranchProtocol, + UpdateCheckRunProtocol, + UpdatePullRequestProtocol, +) from sentry.scm.types import ( SHA, ActionResult, @@ -54,26 +103,16 @@ ) -class SourceCodeManager: - def __init__( - self, - provider: Provider, - *, - referrer: Referrer = "shared", - record_count: Callable[[str, int, dict[str, str]], None] = record_count_metric, - ): - """ - The SourceCodeManager class manages ACLs, rate-limits, environment setup, and a - vendor-agnostic mapping of actions to service-provider commands. The SourceCodeManager - exposes a declarative interface. Developers declare what they want and the concrete - implementation details of what's done are abstracted. - - The SourceCodeManager _will_ throw exceptions. That is its intended operating mode. In your - application code you are expected to catch the base SCMError type. - """ - self.provider = provider - self.referrer = referrer - self.record_count = record_count +class SourceCodeManager(Facade): + """ + The SourceCodeManager class manages ACLs, rate-limits, environment setup, and a + vendor-agnostic mapping of actions to service-provider commands. The SourceCodeManager + exposes a declarative interface. Developers declare what they want and the concrete + implementation details of what's done are abstracted. + + The SourceCodeManager _will_ throw exceptions. That is its intended operating mode. In your + application code you are expected to catch the base SCMError type. 
+ """ @classmethod def make_from_repository_id( @@ -117,559 +156,496 @@ def make_from_integration( return cls(provider, referrer=referrer, record_count=record_count) - def _exec[P, T](self, protocol: type[P], provider_fn: Callable[[P], T]) -> T: - provider = self.provider - if not isinstance(provider, protocol): - raise SCMProviderNotSupported("Action not supported.") - - return exec_provider_fn( - self.provider, - referrer=self.referrer, - provider_fn=lambda: provider_fn(provider), - record_count=self.record_count, - ) - - def can(self, actions: list[str]) -> dict[str, bool]: - """ - Returns the named set of actions the SourceCodeManager can execute against the target API. - - Interactions with source code management services are not transactional. There are many - failure points in the process and partial states are a reality that must be handled. One - common failure mode is a mismatch of expectations between what the SCM supports and what a - SCM provider can actually accommodate. By asking up front, "can the provider for this - customer accommodate all the actions I need to execute?" a developer can eagerly exit or - alter some behavior when we know the request will fail deterministically. This eliminates - the need to clean-up side-effects after a partially implemented SCM provider fails. 
- """ - return { - action: hasattr(ActionMap, action) - and isinstance(self.provider, getattr(ActionMap, action)) - for action in actions - } - - def get_issue_comments( - self, - issue_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[Comment]: - """Get comments on an issue.""" - return self._exec( - ActionMap.get_issue_comments, # type: ignore[type-abstract] - lambda p: p.get_issue_comments(issue_id, pagination, request_options), - ) - - def create_issue_comment(self, issue_id: str, body: str) -> ActionResult[Comment]: - """Create a comment on an issue.""" - return self._exec( - ActionMap.create_issue_comment, # type: ignore[type-abstract] - lambda p: p.create_issue_comment(issue_id, body), - ) - - def delete_issue_comment(self, issue_id: str, comment_id: str) -> None: - """Delete a comment on an issue.""" - return self._exec( - ActionMap.delete_issue_comment, # type: ignore[type-abstract] - lambda p: p.delete_issue_comment(issue_id, comment_id), - ) - - def get_pull_request( - self, - pull_request_id: str, - request_options: RequestOptions | None = None, - ) -> ActionResult[PullRequest]: - """Get a pull request.""" - return self._exec( - ActionMap.get_pull_request, # type: ignore[type-abstract] - lambda p: p.get_pull_request(pull_request_id, request_options), - ) - - def get_pull_request_comments( - self, - pull_request_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[Comment]: - """Get comments on a pull request.""" - return self._exec( - ActionMap.get_pull_request_comments, # type: ignore[type-abstract] - lambda p: p.get_pull_request_comments(pull_request_id, pagination, request_options), - ) - - def create_pull_request_comment(self, pull_request_id: str, body: str) -> ActionResult[Comment]: - """Create a comment on a pull request.""" - return self._exec( - ActionMap.create_pull_request_comment, 
# type: ignore[type-abstract] - lambda p: p.create_pull_request_comment(pull_request_id, body), - ) - - def delete_pull_request_comment(self, pull_request_id: str, comment_id: str) -> None: - """Delete a comment on a pull request.""" - return self._exec( - ActionMap.delete_pull_request_comment, # type: ignore[type-abstract] - lambda p: p.delete_pull_request_comment(pull_request_id, comment_id), - ) - - def get_issue_comment_reactions( - self, - issue_id: str, - comment_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[ReactionResult]: - """Get reactions on an issue comment.""" - return self._exec( - ActionMap.get_issue_comment_reactions, # type: ignore[type-abstract] - lambda p: p.get_issue_comment_reactions( - issue_id, comment_id, pagination, request_options - ), - ) - - def create_issue_comment_reaction( - self, issue_id: str, comment_id: str, reaction: Reaction - ) -> ActionResult[ReactionResult]: - """Create a reaction on an issue comment.""" - return self._exec( - ActionMap.create_issue_comment_reaction, # type: ignore[type-abstract] - lambda p: p.create_issue_comment_reaction(issue_id, comment_id, reaction), - ) - - def delete_issue_comment_reaction( - self, issue_id: str, comment_id: str, reaction_id: str - ) -> None: - """Delete a reaction on an issue comment.""" - return self._exec( - ActionMap.delete_issue_comment_reaction, # type: ignore[type-abstract] - lambda p: p.delete_issue_comment_reaction(issue_id, comment_id, reaction_id), - ) - - def get_pull_request_comment_reactions( - self, - pull_request_id: str, - comment_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[ReactionResult]: - """Get reactions on a pull request comment.""" - return self._exec( - ActionMap.get_pull_request_comment_reactions, # type: ignore[type-abstract] - lambda p: p.get_pull_request_comment_reactions( - 
pull_request_id, comment_id, pagination, request_options - ), - ) - - def create_pull_request_comment_reaction( - self, pull_request_id: str, comment_id: str, reaction: Reaction - ) -> ActionResult[ReactionResult]: - """Create a reaction on a pull request comment.""" - return self._exec( - ActionMap.create_pull_request_comment_reaction, # type: ignore[type-abstract] - lambda p: p.create_pull_request_comment_reaction(pull_request_id, comment_id, reaction), - ) - - def delete_pull_request_comment_reaction( - self, pull_request_id: str, comment_id: str, reaction_id: str - ) -> None: - """Delete a reaction on a pull request comment.""" - return self._exec( - ActionMap.delete_pull_request_comment_reaction, # type: ignore[type-abstract] - lambda p: p.delete_pull_request_comment_reaction( - pull_request_id, comment_id, reaction_id - ), - ) - def get_issue_reactions( - self, - issue_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[ReactionResult]: - """Get reactions on an issue.""" - return self._exec( - ActionMap.get_issue_reactions, # type: ignore[type-abstract] - lambda p: p.get_issue_reactions(issue_id, pagination, request_options), - ) +def get_issue_comments( + scm: GetIssueCommentsProtocol, + issue_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[Comment]: + """Get comments on an issue.""" + return scm.get_issue_comments(issue_id, pagination, request_options) + + +def create_issue_comment( + scm: CreateIssueCommentProtocol, issue_id: str, body: str +) -> ActionResult[Comment]: + """Create a comment on an issue.""" + return scm.create_issue_comment(issue_id, body) + + +def delete_issue_comment(scm: DeleteIssueCommentProtocol, issue_id: str, comment_id: str) -> None: + """Delete a comment on an issue.""" + return scm.delete_issue_comment(issue_id, comment_id) + + +def get_pull_request( + scm: 
GetPullRequestProtocol, + pull_request_id: str, + request_options: RequestOptions | None = None, +) -> ActionResult[PullRequest]: + """Get a pull request.""" + return scm.get_pull_request(pull_request_id, request_options) + + +def get_pull_request_comments( + scm: GetPullRequestCommentsProtocol, + pull_request_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[Comment]: + """Get comments on a pull request.""" + return scm.get_pull_request_comments(pull_request_id, pagination, request_options) + + +def create_pull_request_comment( + scm: CreatePullRequestCommentProtocol, pull_request_id: str, body: str +) -> ActionResult[Comment]: + """Create a comment on a pull request.""" + return scm.create_pull_request_comment(pull_request_id, body) + + +def delete_pull_request_comment( + scm: DeletePullRequestCommentProtocol, pull_request_id: str, comment_id: str +) -> None: + """Delete a comment on a pull request.""" + return scm.delete_pull_request_comment(pull_request_id, comment_id) + + +def get_issue_comment_reactions( + scm: GetIssueCommentReactionsProtocol, + issue_id: str, + comment_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[ReactionResult]: + """Get reactions on an issue comment.""" + return scm.get_issue_comment_reactions(issue_id, comment_id, pagination, request_options) + + +def create_issue_comment_reaction( + scm: CreateIssueCommentReactionProtocol, issue_id: str, comment_id: str, reaction: Reaction +) -> ActionResult[ReactionResult]: + """Create a reaction on an issue comment.""" + return scm.create_issue_comment_reaction(issue_id, comment_id, reaction) + + +def delete_issue_comment_reaction( + scm: DeleteIssueCommentReactionProtocol, issue_id: str, comment_id: str, reaction_id: str +) -> None: + """Delete a reaction on an issue comment.""" + return scm.delete_issue_comment_reaction(issue_id, 
comment_id, reaction_id) + + +def get_pull_request_comment_reactions( + scm: GetPullRequestCommentReactionsProtocol, + pull_request_id: str, + comment_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[ReactionResult]: + """Get reactions on a pull request comment.""" + return scm.get_pull_request_comment_reactions( + pull_request_id, comment_id, pagination, request_options + ) - def create_issue_reaction( - self, issue_id: str, reaction: Reaction - ) -> ActionResult[ReactionResult]: - """Create a reaction on an issue.""" - return self._exec( - ActionMap.create_issue_reaction, # type: ignore[type-abstract] - lambda p: p.create_issue_reaction(issue_id, reaction), - ) - def delete_issue_reaction(self, issue_id: str, reaction_id: str) -> None: - """Delete a reaction on an issue.""" - return self._exec( - ActionMap.delete_issue_reaction, # type: ignore[type-abstract] - lambda p: p.delete_issue_reaction(issue_id, reaction_id), - ) - - def get_pull_request_reactions( - self, - pull_request_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[ReactionResult]: - """Get reactions on a pull request.""" - return self._exec( - ActionMap.get_pull_request_reactions, # type: ignore[type-abstract] - lambda p: p.get_pull_request_reactions(pull_request_id, pagination, request_options), - ) +def create_pull_request_comment_reaction( + scm: CreatePullRequestCommentReactionProtocol, + pull_request_id: str, + comment_id: str, + reaction: Reaction, +) -> ActionResult[ReactionResult]: + """Create a reaction on a pull request comment.""" + return scm.create_pull_request_comment_reaction(pull_request_id, comment_id, reaction) + + +def delete_pull_request_comment_reaction( + scm: DeletePullRequestCommentReactionProtocol, + pull_request_id: str, + comment_id: str, + reaction_id: str, +) -> None: + """Delete a reaction on a pull request 
comment.""" + return scm.delete_pull_request_comment_reaction(pull_request_id, comment_id, reaction_id) + + +def get_issue_reactions( + scm: GetIssueReactionsProtocol, + issue_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[ReactionResult]: + """Get reactions on an issue.""" + return scm.get_issue_reactions(issue_id, pagination, request_options) + + +def create_issue_reaction( + scm: CreateIssueReactionProtocol, issue_id: str, reaction: Reaction +) -> ActionResult[ReactionResult]: + """Create a reaction on an issue.""" + return scm.create_issue_reaction(issue_id, reaction) + + +def delete_issue_reaction( + scm: DeleteIssueReactionProtocol, issue_id: str, reaction_id: str +) -> None: + """Delete a reaction on an issue.""" + return scm.delete_issue_reaction(issue_id, reaction_id) + + +def get_pull_request_reactions( + scm: GetPullRequestReactionsProtocol, + pull_request_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[ReactionResult]: + """Get reactions on a pull request.""" + return scm.get_pull_request_reactions(pull_request_id, pagination, request_options) + + +def create_pull_request_reaction( + scm: CreatePullRequestReactionProtocol, pull_request_id: str, reaction: Reaction +) -> ActionResult[ReactionResult]: + """Create a reaction on a pull request.""" + return scm.create_pull_request_reaction(pull_request_id, reaction) + + +def delete_pull_request_reaction( + scm: DeletePullRequestReactionProtocol, pull_request_id: str, reaction_id: str +) -> None: + """Delete a reaction on a pull request.""" + return scm.delete_pull_request_reaction(pull_request_id, reaction_id) + + +def get_branch( + scm: GetBranchProtocol, + branch: BranchName, + request_options: RequestOptions | None = None, +) -> ActionResult[GitRef]: + """Get a branch reference.""" + return scm.get_branch(branch, request_options) + + +def 
create_branch(scm: CreateBranchProtocol, branch: BranchName, sha: SHA) -> ActionResult[GitRef]: + """Create a new branch pointing at the given SHA.""" + return scm.create_branch(branch, sha) + + +def update_branch( + scm: UpdateBranchProtocol, branch: BranchName, sha: SHA, force: bool = False +) -> ActionResult[GitRef]: + """Update a branch to point at a new SHA.""" + return scm.update_branch(branch, sha, force) + + +def create_git_blob( + scm: CreateGitBlobProtocol, content: str, encoding: str +) -> ActionResult[GitBlob]: + """Create a git blob object.""" + return scm.create_git_blob(content, encoding) + + +def get_file_content( + scm: GetFileContentProtocol, + path: str, + ref: str | None = None, + request_options: RequestOptions | None = None, +) -> ActionResult[FileContent]: + return scm.get_file_content(path, ref, request_options) + + +def get_commit( + scm: GetCommitProtocol, + sha: SHA, + request_options: RequestOptions | None = None, +) -> ActionResult[Commit]: + return scm.get_commit(sha, request_options) + + +def get_commits( + scm: GetCommitsProtocol, + ref: str | None = None, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[Commit]: + """ + Get a paginated list of commits. + + `ref` is either a branch name, a tag name, or a commit SHA. + Specifying a commit SHA retrieves commits up to the given commit SHA. + + Commits are returned in descending order. Equivalent to `git log ref`. 
+ """ + return scm.get_commits(ref=ref, pagination=pagination, request_options=request_options) - def create_pull_request_reaction( - self, pull_request_id: str, reaction: Reaction - ) -> ActionResult[ReactionResult]: - """Create a reaction on a pull request.""" - return self._exec( - ActionMap.create_pull_request_reaction, # type: ignore[type-abstract] - lambda p: p.create_pull_request_reaction(pull_request_id, reaction), - ) - - def delete_pull_request_reaction(self, pull_request_id: str, reaction_id: str) -> None: - """Delete a reaction on a pull request.""" - return self._exec( - ActionMap.delete_pull_request_reaction, # type: ignore[type-abstract] - lambda p: p.delete_pull_request_reaction(pull_request_id, reaction_id), - ) - - def get_branch( - self, - branch: BranchName, - request_options: RequestOptions | None = None, - ) -> ActionResult[GitRef]: - """Get a branch reference.""" - return self._exec( - ActionMap.get_branch, # type: ignore[type-abstract] - lambda p: p.get_branch(branch, request_options), - ) - - def create_branch(self, branch: BranchName, sha: SHA) -> ActionResult[GitRef]: - """Create a new branch pointing at the given SHA.""" - return self._exec( - ActionMap.create_branch, # type: ignore[type-abstract] - lambda p: p.create_branch(branch, sha), - ) - - def update_branch( - self, branch: BranchName, sha: SHA, force: bool = False - ) -> ActionResult[GitRef]: - """Update a branch to point at a new SHA.""" - return self._exec( - ActionMap.update_branch, # type: ignore[type-abstract] - lambda p: p.update_branch(branch, sha, force), - ) - - def create_git_blob(self, content: str, encoding: str) -> ActionResult[GitBlob]: - """Create a git blob object.""" - return self._exec( - ActionMap.create_git_blob, # type: ignore[type-abstract] - lambda p: p.create_git_blob(content, encoding), - ) - - def get_file_content( - self, - path: str, - ref: str | None = None, - request_options: RequestOptions | None = None, - ) -> ActionResult[FileContent]: - return 
self._exec( - ActionMap.get_file_content, # type: ignore[type-abstract] - lambda p: p.get_file_content(path, ref, request_options), - ) - - def get_archive_link( - self, - ref: str, - archive_format: ArchiveFormat = "tarball", - ) -> ActionResult[ArchiveLink]: - """Get a URL to download a repository archive.""" - return self._exec( - ActionMap.get_archive_link, # type: ignore[type-abstract] - lambda p: p.get_archive_link(ref, archive_format), - ) - - def get_commit( - self, - sha: SHA, - request_options: RequestOptions | None = None, - ) -> ActionResult[Commit]: - return self._exec( - ActionMap.get_commit, # type: ignore[type-abstract] - lambda p: p.get_commit(sha, request_options), - ) - - def get_commits( - self, - ref: str | None = None, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[Commit]: - """ - Get a paginated list of commits. - - `ref` is either a branch name, a tag name, or a commit SHA. - Specifying a commit SHA retrieves commits up to the given commit SHA. - - Commits are returned in descending order. Equivalent to `git log ref`. - """ - return self._exec( - ActionMap.get_commits, # type: ignore[type-abstract] - lambda p: p.get_commits( - ref=ref, pagination=pagination, request_options=request_options - ), - ) - - def get_commits_by_path( - self, - path: str, - ref: str | None = None, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[Commit]: - """ - Get a paginated list of commits for a given filepath. - - `ref` is either a branch name, a tag name, or a commit SHA. - Specifying a commit SHA retrieves commits up to the given commit SHA. - - Commits are returned in descending order. Equivalent to `git log ref`. 
- """ - return self._exec( - ActionMap.get_commits_by_path, # type: ignore[type-abstract] - lambda p: p.get_commits_by_path( - path=path, ref=ref, pagination=pagination, request_options=request_options - ), - ) - - def compare_commits( - self, - start_sha: SHA, - end_sha: SHA, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[Commit]: - return self._exec( - ActionMap.compare_commits, # type: ignore[type-abstract] - lambda p: p.compare_commits(start_sha, end_sha, pagination, request_options), - ) - - def get_tree( - self, - tree_sha: SHA, - recursive: bool = True, - request_options: RequestOptions | None = None, - ) -> ActionResult[GitTree]: - return self._exec( - ActionMap.get_tree, # type: ignore[type-abstract] - lambda p: p.get_tree(tree_sha, recursive=recursive, request_options=request_options), - ) - - def get_git_commit( - self, - sha: SHA, - request_options: RequestOptions | None = None, - ) -> ActionResult[GitCommitObject]: - return self._exec( - ActionMap.get_git_commit, # type: ignore[type-abstract] - lambda p: p.get_git_commit(sha, request_options), - ) - - def create_git_tree( - self, - tree: list[InputTreeEntry], - base_tree: SHA | None = None, - ) -> ActionResult[GitTree]: - return self._exec( - ActionMap.create_git_tree, # type: ignore[type-abstract] - lambda p: p.create_git_tree(tree, base_tree=base_tree), - ) - - def create_git_commit( - self, message: str, tree_sha: SHA, parent_shas: list[SHA] - ) -> ActionResult[GitCommitObject]: - return self._exec( - ActionMap.create_git_commit, # type: ignore[type-abstract] - lambda p: p.create_git_commit(message, tree_sha, parent_shas), - ) - - def get_pull_request_files( - self, - pull_request_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[PullRequestFile]: - return self._exec( - ActionMap.get_pull_request_files, # type: ignore[type-abstract] - lambda 
p: p.get_pull_request_files(pull_request_id, pagination, request_options), - ) - - def get_pull_request_commits( - self, - pull_request_id: str, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[PullRequestCommit]: - return self._exec( - ActionMap.get_pull_request_commits, # type: ignore[type-abstract] - lambda p: p.get_pull_request_commits(pull_request_id, pagination, request_options), - ) - - def get_pull_request_diff( - self, - pull_request_id: str, - request_options: RequestOptions | None = None, - ) -> ActionResult[str]: - return self._exec( - ActionMap.get_pull_request_diff, # type: ignore[type-abstract] - lambda p: p.get_pull_request_diff(pull_request_id, request_options), - ) - - def get_pull_requests( - self, - state: PullRequestState | None = "open", - head: BranchName | None = None, - pagination: PaginationParams | None = None, - request_options: RequestOptions | None = None, - ) -> PaginatedActionResult[PullRequest]: - return self._exec( - ActionMap.get_pull_requests, # type: ignore[type-abstract] - lambda p: p.get_pull_requests(state, head, pagination, request_options), - ) - - def create_pull_request( - self, - title: str, - body: str, - head: BranchName, - base: BranchName, - ) -> ActionResult[PullRequest]: - return self._exec( - ActionMap.create_pull_request, # type: ignore[type-abstract] - lambda p: p.create_pull_request(title, body, head, base), - ) - - def create_pull_request_draft( - self, - title: str, - body: str, - head: BranchName, - base: BranchName, - ) -> ActionResult[PullRequest]: - return self._exec( - ActionMap.create_pull_request_draft, # type: ignore[type-abstract] - lambda p: p.create_pull_request_draft(title, body, head, base), - ) - - def update_pull_request( - self, - pull_request_id: str, - title: str | None = None, - body: str | None = None, - state: PullRequestState | None = None, - ) -> ActionResult[PullRequest]: - return self._exec( - 
ActionMap.update_pull_request, # type: ignore[type-abstract] - lambda p: p.update_pull_request(pull_request_id, title=title, body=body, state=state), - ) - - def request_review(self, pull_request_id: str, reviewers: list[str]) -> None: - return self._exec( - ActionMap.request_review, # type: ignore[type-abstract] - lambda p: p.request_review(pull_request_id, reviewers), - ) - - def create_review_comment_file( - self, - pull_request_id: str, - commit_id: SHA, - body: str, - path: str, - side: ReviewSide, - ) -> ActionResult[ReviewComment]: - """Leave a review comment on a file.""" - return self._exec( - ActionMap.create_review_comment_file, # type: ignore[type-abstract] - lambda p: p.create_review_comment_file(pull_request_id, commit_id, body, path, side), - ) - - def create_review_comment_reply( - self, - pull_request_id: str, - body: str, - comment_id: str, - ) -> ActionResult[ReviewComment]: - """Leave a review comment in reply to another review comment.""" - return self._exec( - ActionMap.create_review_comment_reply, # type: ignore[type-abstract] - lambda p: p.create_review_comment_reply(pull_request_id, body, comment_id), - ) - - def create_review( - self, - pull_request_id: str, - commit_sha: SHA, - event: ReviewEvent, - comments: list[ReviewCommentInput], - body: str | None = None, - ) -> ActionResult[Review]: - return self._exec( - ActionMap.create_review, # type: ignore[type-abstract] - lambda p: p.create_review(pull_request_id, commit_sha, event, comments, body=body), - ) - - def create_check_run( - self, - name: str, - head_sha: SHA, - status: BuildStatus | None = None, - conclusion: BuildConclusion | None = None, - external_id: str | None = None, - started_at: str | None = None, - completed_at: str | None = None, - output: CheckRunOutput | None = None, - ) -> ActionResult[CheckRun]: - return self._exec( - ActionMap.create_check_run, # type: ignore[type-abstract] - lambda p: p.create_check_run( - name, - head_sha, - status=status, - conclusion=conclusion, 
- external_id=external_id, - started_at=started_at, - completed_at=completed_at, - output=output, - ), - ) - - def get_check_run( - self, - check_run_id: ResourceId, - request_options: RequestOptions | None = None, - ) -> ActionResult[CheckRun]: - return self._exec( - ActionMap.get_check_run, # type: ignore[type-abstract] - lambda p: p.get_check_run(check_run_id, request_options), - ) - - def update_check_run( - self, - check_run_id: ResourceId, - status: BuildStatus | None = None, - conclusion: BuildConclusion | None = None, - output: CheckRunOutput | None = None, - ) -> ActionResult[CheckRun]: - return self._exec( - ActionMap.update_check_run, # type: ignore[type-abstract] - lambda p: p.update_check_run( - check_run_id, status=status, conclusion=conclusion, output=output - ), - ) - - def minimize_comment(self, comment_node_id: str, reason: str) -> None: - return self._exec( - ActionMap.minimize_comment, # type: ignore[type-abstract] - lambda p: p.minimize_comment(comment_node_id, reason), - ) + +def get_commits_by_path( + scm: GetCommitsByPathProtocol, + path: str, + ref: str | None = None, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[Commit]: + """ + Get a paginated list of commits for a given filepath. + + `ref` is either a branch name, a tag name, or a commit SHA. + Specifying a commit SHA retrieves commits up to the given commit SHA. + + Commits are returned in descending order. Equivalent to `git log ref`. 
+ """ + return scm.get_commits_by_path( + path=path, ref=ref, pagination=pagination, request_options=request_options + ) + + +def compare_commits( + scm: CompareCommitsProtocol, + start_sha: SHA, + end_sha: SHA, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[Commit]: + return scm.compare_commits(start_sha, end_sha, pagination, request_options) + + +def get_tree( + scm: GetTreeProtocol, + tree_sha: SHA, + recursive: bool = True, + request_options: RequestOptions | None = None, +) -> ActionResult[GitTree]: + return scm.get_tree(tree_sha, recursive=recursive, request_options=request_options) + + +def get_git_commit( + scm: GetGitCommitProtocol, + sha: SHA, + request_options: RequestOptions | None = None, +) -> ActionResult[GitCommitObject]: + return scm.get_git_commit(sha, request_options) + + +def create_git_tree( + scm: CreateGitTreeProtocol, + tree: list[InputTreeEntry], + base_tree: SHA | None = None, +) -> ActionResult[GitTree]: + return scm.create_git_tree(tree, base_tree=base_tree) + + +def create_git_commit( + scm: CreateGitCommitProtocol, message: str, tree_sha: SHA, parent_shas: list[SHA] +) -> ActionResult[GitCommitObject]: + return scm.create_git_commit(message, tree_sha, parent_shas) + + +def get_pull_request_files( + scm: GetPullRequestFilesProtocol, + pull_request_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[PullRequestFile]: + return scm.get_pull_request_files(pull_request_id, pagination, request_options) + + +def get_pull_request_commits( + scm: GetPullRequestCommitsProtocol, + pull_request_id: str, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[PullRequestCommit]: + return scm.get_pull_request_commits(pull_request_id, pagination, request_options) + + +def get_pull_request_diff( + scm: GetPullRequestDiffProtocol, + 
pull_request_id: str, + request_options: RequestOptions | None = None, +) -> ActionResult[str]: + return scm.get_pull_request_diff(pull_request_id, request_options) + + +def get_pull_requests( + scm: GetPullRequestsProtocol, + state: PullRequestState | None = "open", + head: BranchName | None = None, + pagination: PaginationParams | None = None, + request_options: RequestOptions | None = None, +) -> PaginatedActionResult[PullRequest]: + return scm.get_pull_requests(state, head, pagination, request_options) + + +def create_pull_request( + scm: CreatePullRequestProtocol, + title: str, + body: str, + head: BranchName, + base: BranchName, +) -> ActionResult[PullRequest]: + return scm.create_pull_request(title, body, head, base) + + +def create_pull_request_draft( + scm: CreatePullRequestDraftProtocol, + title: str, + body: str, + head: BranchName, + base: BranchName, +) -> ActionResult[PullRequest]: + return scm.create_pull_request_draft(title, body, head, base) + + +def update_pull_request( + scm: UpdatePullRequestProtocol, + pull_request_id: str, + title: str | None = None, + body: str | None = None, + state: PullRequestState | None = None, +) -> ActionResult[PullRequest]: + return scm.update_pull_request(pull_request_id, title=title, body=body, state=state) + + +def request_review(scm: RequestReviewProtocol, pull_request_id: str, reviewers: list[str]) -> None: + return scm.request_review(pull_request_id, reviewers) + + +def create_review_comment_file( + scm: CreateReviewCommentFileProtocol, + pull_request_id: str, + commit_id: SHA, + body: str, + path: str, + side: ReviewSide, +) -> ActionResult[ReviewComment]: + """Leave a review comment on a file.""" + return scm.create_review_comment_file(pull_request_id, commit_id, body, path, side) + + +def create_review_comment_reply( + scm: CreateReviewCommentReplyProtocol, + pull_request_id: str, + body: str, + comment_id: str, +) -> ActionResult[ReviewComment]: + """Leave a review comment in reply to another review 
comment.""" + return scm.create_review_comment_reply(pull_request_id, body, comment_id) + + +def create_review( + scm: CreateReviewProtocol, + pull_request_id: str, + commit_sha: SHA, + event: ReviewEvent, + comments: list[ReviewCommentInput], + body: str | None = None, +) -> ActionResult[Review]: + return scm.create_review(pull_request_id, commit_sha, event, comments, body=body) + + +def create_check_run( + scm: CreateCheckRunProtocol, + name: str, + head_sha: SHA, + status: BuildStatus | None = None, + conclusion: BuildConclusion | None = None, + external_id: str | None = None, + started_at: str | None = None, + completed_at: str | None = None, + output: CheckRunOutput | None = None, +) -> ActionResult[CheckRun]: + return scm.create_check_run( + name, + head_sha, + status=status, + conclusion=conclusion, + external_id=external_id, + started_at=started_at, + completed_at=completed_at, + output=output, + ) + + +def get_check_run( + scm: GetCheckRunProtocol, + check_run_id: ResourceId, + request_options: RequestOptions | None = None, +) -> ActionResult[CheckRun]: + return scm.get_check_run(check_run_id, request_options) + + +def update_check_run( + scm: UpdateCheckRunProtocol, + check_run_id: ResourceId, + status: BuildStatus | None = None, + conclusion: BuildConclusion | None = None, + output: CheckRunOutput | None = None, +) -> ActionResult[CheckRun]: + return scm.update_check_run(check_run_id, status=status, conclusion=conclusion, output=output) + + +def minimize_comment(scm: MinimizeCommentProtocol, comment_node_id: str, reason: str) -> None: + return scm.minimize_comment(comment_node_id, reason) + + +def get_archive_link( + scm: GetArchiveLinkProtocol, + ref: str, + archive_format: ArchiveFormat = "tarball", +) -> ActionResult[ArchiveLink]: + """Get a URL to download a repository archive.""" + return scm.get_archive_link(ref, archive_format) + + +__all__ = ( + "SourceCodeManager", + "compare_commits", + "create_branch", + "create_check_run", + 
"create_git_blob", + "create_git_commit", + "create_git_tree", + "create_issue_comment_reaction", + "create_issue_comment", + "create_issue_reaction", + "create_pull_request_comment_reaction", + "create_pull_request_comment", + "create_pull_request_draft", + "create_pull_request_reaction", + "create_pull_request", + "create_review_comment_file", + "create_review_comment_reply", + "create_review", + "delete_issue_comment_reaction", + "delete_issue_comment", + "delete_issue_reaction", + "delete_pull_request_comment_reaction", + "delete_pull_request_comment", + "delete_pull_request_reaction", + "get_archive_link", + "get_branch", + "get_check_run", + "get_commit", + "get_commits_by_path", + "get_commits", + "get_file_content", + "get_git_commit", + "get_issue_comment_reactions", + "get_issue_comments", + "get_issue_reactions", + "get_pull_request_comment_reactions", + "get_pull_request_comments", + "get_pull_request_commits", + "get_pull_request_diff", + "get_pull_request_files", + "get_pull_request_reactions", + "get_pull_request", + "get_pull_requests", + "get_tree", + "minimize_comment", + "request_review", + "update_branch", + "update_check_run", + "update_pull_request", +) diff --git a/src/sentry/scm/private/facade.py b/src/sentry/scm/private/facade.py new file mode 100644 index 00000000000000..f72c48303e7d2a --- /dev/null +++ b/src/sentry/scm/private/facade.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +from collections.abc import Hashable +from functools import lru_cache +from typing import Any, Callable, cast + +from sentry.scm.private.helpers import exec_provider_fn +from sentry.scm.private.ipc import record_count_metric +from sentry.scm.private.provider import ALL_PROTOCOLS, Provider +from sentry.scm.types import Referrer + + +def _delegating_method(name: str) -> Callable[..., Any]: + """Return a method that forwards calls to self.provider..""" + + def method(self: Facade, *args: Any, **kwargs: Any) -> Any: + return exec_provider_fn( + 
self.provider, + referrer=self.referrer, + provider_fn=lambda: getattr(self.provider, name)(*args, **kwargs), + record_count=self.record_count, + ) + + method.__name__ = name + return method + + +def _protocol_attrs(proto: object) -> tuple[str, ...]: + """Return the runtime protocol attribute names used for capability detection.""" + return cast(tuple[str, ...], getattr(proto, "__protocol_attrs__", ())) + + +@lru_cache(maxsize=32) +def _facade_type_for_provider_class( + cls: type[Facade], provider_cls: type[Provider] +) -> type[Facade]: + """Build (and cache) one facade subclass per implementation class.""" + methods: dict[str, Any] = {} + for proto in ALL_PROTOCOLS: + protocol_attrs = _protocol_attrs(proto) + if all(hasattr(provider_cls, attr) for attr in protocol_attrs): + for attr in protocol_attrs: + if attr not in methods: + methods[attr] = _delegating_method(attr) + return type(f"FacadeFor{provider_cls.__name__}", (cls,), methods) + + +class Facade: + # `Facade` itself declares no capability methods, so MyPy rejects direct + # calls like `facade.create_issue_comment()` and forces `isinstance` guards. + # + # At construction time __new__ builds a private subclass that has exactly + # the methods supported by `impl` as real class-body attributes. Python + # 3.12+ runtime_checkable isinstance() checks look at the class body, not + # __getattr__, so this is what makes `isinstance(facade, protocol)` work. + # + # After the isinstance guard MyPy narrows `facade` to `Facade & Protocol` + # and statically validates method calls. 
+ + provider: Provider + referrer: Referrer + record_count: Callable[[str, int, dict[str, str]], None] + + def __new__( + cls, + provider: Provider, + *, + referrer: Referrer = "shared", + record_count: Callable[[str, int, dict[str, str]], None] = record_count_metric, + ) -> Facade: + facade_cls = _facade_type_for_provider_class( + cast(Hashable, cls), cast(Hashable, type(provider)) + ) + instance = object.__new__(facade_cls) + instance.provider = provider + instance.referrer = referrer + instance.record_count = record_count + return instance + + def __init__( + self, + provider: Provider, + *, + referrer: Referrer = "shared", + record_count: Callable[[str, int, dict[str, str]], None] = record_count_metric, + ) -> None: + pass diff --git a/src/sentry/scm/private/provider.py b/src/sentry/scm/private/provider.py index dbeb339ec4fb08..502494f5068b10 100644 --- a/src/sentry/scm/private/provider.py +++ b/src/sentry/scm/private/provider.py @@ -531,58 +531,59 @@ class ResolveReviewThreadProtocol(Protocol): def resolve_review_thread(self, thread_node_id: str) -> None: ... 
-class ActionMap: - get_issue_comments = GetIssueCommentsProtocol - create_issue_comment = CreateIssueCommentProtocol - delete_issue_comment = DeleteIssueCommentProtocol - get_pull_request_comments = GetPullRequestCommentsProtocol - create_pull_request_comment = CreatePullRequestCommentProtocol - delete_pull_request_comment = DeletePullRequestCommentProtocol - get_issue_comment_reactions = GetIssueCommentReactionsProtocol - create_issue_comment_reaction = CreateIssueCommentReactionProtocol - delete_issue_comment_reaction = DeleteIssueCommentReactionProtocol - get_pull_request_comment_reactions = GetPullRequestCommentReactionsProtocol - create_pull_request_comment_reaction = CreatePullRequestCommentReactionProtocol - delete_pull_request_comment_reaction = DeletePullRequestCommentReactionProtocol - get_issue_reactions = GetIssueReactionsProtocol - create_issue_reaction = CreateIssueReactionProtocol - delete_issue_reaction = DeleteIssueReactionProtocol - get_pull_request_reactions = GetPullRequestReactionsProtocol - create_pull_request_reaction = CreatePullRequestReactionProtocol - delete_pull_request_reaction = DeletePullRequestReactionProtocol - get_branch = GetBranchProtocol - create_branch = CreateBranchProtocol - update_branch = UpdateBranchProtocol - get_commit = GetCommitProtocol - get_commits = GetCommitsProtocol - get_commits_by_path = GetCommitsByPathProtocol - compare_commits = CompareCommitsProtocol - get_pull_request = GetPullRequestProtocol - get_pull_requests = GetPullRequestsProtocol - get_pull_request_files = GetPullRequestFilesProtocol - get_pull_request_commits = GetPullRequestCommitsProtocol - get_pull_request_diff = GetPullRequestDiffProtocol - create_pull_request = CreatePullRequestProtocol - create_pull_request_draft = CreatePullRequestDraftProtocol - update_pull_request = UpdatePullRequestProtocol - request_review = RequestReviewProtocol - get_tree = GetTreeProtocol - get_git_commit = GetGitCommitProtocol - create_git_blob = 
CreateGitBlobProtocol - create_git_tree = CreateGitTreeProtocol - create_git_commit = CreateGitCommitProtocol - get_file_content = GetFileContentProtocol - get_archive_link = GetArchiveLinkProtocol - get_check_run = GetCheckRunProtocol - create_check_run = CreateCheckRunProtocol - update_check_run = UpdateCheckRunProtocol - create_review_comment_file = CreateReviewCommentFileProtocol - create_review_comment_line = CreateReviewCommentLineProtocol - create_review_comment_multiline = CreateReviewCommentMultilineProtocol - create_review_comment_reply = CreateReviewCommentReplyProtocol - create_review = CreateReviewProtocol - minimize_comment = MinimizeCommentProtocol - resolve_review_thread = ResolveReviewThreadProtocol +ALL_PROTOCOLS = ( + CompareCommitsProtocol, + CreateBranchProtocol, + CreateCheckRunProtocol, + CreateGitBlobProtocol, + CreateGitCommitProtocol, + CreateGitTreeProtocol, + CreateIssueCommentProtocol, + CreateIssueCommentReactionProtocol, + CreateIssueReactionProtocol, + CreatePullRequestCommentProtocol, + CreatePullRequestCommentReactionProtocol, + CreatePullRequestDraftProtocol, + CreatePullRequestProtocol, + CreatePullRequestReactionProtocol, + CreateReviewCommentFileProtocol, + CreateReviewCommentLineProtocol, + CreateReviewCommentMultilineProtocol, + CreateReviewCommentReplyProtocol, + CreateReviewProtocol, + DeleteIssueCommentProtocol, + DeleteIssueCommentReactionProtocol, + DeleteIssueReactionProtocol, + DeletePullRequestCommentProtocol, + DeletePullRequestCommentReactionProtocol, + DeletePullRequestReactionProtocol, + GetArchiveLinkProtocol, + GetBranchProtocol, + GetCheckRunProtocol, + GetCommitProtocol, + GetCommitsByPathProtocol, + GetCommitsProtocol, + GetFileContentProtocol, + GetGitCommitProtocol, + GetIssueCommentReactionsProtocol, + GetIssueCommentsProtocol, + GetIssueReactionsProtocol, + GetPullRequestCommentReactionsProtocol, + GetPullRequestCommentsProtocol, + GetPullRequestCommitsProtocol, + GetPullRequestDiffProtocol, + 
GetPullRequestFilesProtocol, + GetPullRequestProtocol, + GetPullRequestReactionsProtocol, + GetPullRequestsProtocol, + GetTreeProtocol, + MinimizeCommentProtocol, + RequestReviewProtocol, + ResolveReviewThreadProtocol, + UpdateBranchProtocol, + UpdateCheckRunProtocol, + UpdatePullRequestProtocol, +) class Provider(Protocol): diff --git a/src/sentry/scm/private/rpc.py b/src/sentry/scm/private/rpc.py index 6c10eea53d36e8..0d9f972ba91417 100644 --- a/src/sentry/scm/private/rpc.py +++ b/src/sentry/scm/private/rpc.py @@ -2,7 +2,57 @@ import pydantic -from sentry.scm.actions import SourceCodeManager +from sentry.scm.actions import ( + SourceCodeManager, + compare_commits, + create_branch, + create_check_run, + create_git_blob, + create_git_commit, + create_git_tree, + create_issue_comment, + create_issue_comment_reaction, + create_issue_reaction, + create_pull_request, + create_pull_request_comment, + create_pull_request_comment_reaction, + create_pull_request_draft, + create_pull_request_reaction, + create_review, + create_review_comment_file, + create_review_comment_reply, + delete_issue_comment, + delete_issue_comment_reaction, + delete_issue_reaction, + delete_pull_request_comment, + delete_pull_request_comment_reaction, + delete_pull_request_reaction, + get_archive_link, + get_branch, + get_check_run, + get_commit, + get_commits, + get_commits_by_path, + get_file_content, + get_git_commit, + get_issue_comment_reactions, + get_issue_comments, + get_issue_reactions, + get_pull_request, + get_pull_request_comment_reactions, + get_pull_request_comments, + get_pull_request_commits, + get_pull_request_diff, + get_pull_request_files, + get_pull_request_reactions, + get_pull_requests, + get_tree, + minimize_comment, + request_review, + update_branch, + update_check_run, + update_pull_request, +) from sentry.scm.errors import ( SCMProviderNotSupported, SCMRpcActionCallError, @@ -47,6 +97,10 @@ def dispatch(action_name: str, raw_request_data: dict[str, Any]): try: return 
scm_action_registry[action_name](scm, **request.args.get_extra_fields()) + except AttributeError as e: + raise SCMProviderNotSupported( + "call_missing_provider_method is not supported by service-provider GitHubProvider" + ) from e except TypeError as e: raise SCMRpcActionCallError(action_name, str(e)) from e @@ -79,53 +133,52 @@ def get_extra_fields(self) -> dict[str, Any]: # If a method of SourceCodeManager accepts only JSON-serializable arguments, by names, and # returns a JSON-serializable type, then it can be listed here directly. # Else, an adapter function must be used. - "can_v1": SourceCodeManager.can, - "get_issue_comments_v1": SourceCodeManager.get_issue_comments, - "create_issue_comment_v1": SourceCodeManager.create_issue_comment, - "delete_issue_comment_v1": SourceCodeManager.delete_issue_comment, - "get_pull_request_v1": SourceCodeManager.get_pull_request, - "get_pull_request_comments_v1": SourceCodeManager.get_pull_request_comments, - "create_pull_request_comment_v1": SourceCodeManager.create_pull_request_comment, - "delete_pull_request_comment_v1": SourceCodeManager.delete_pull_request_comment, - "get_issue_comment_reactions_v1": SourceCodeManager.get_issue_comment_reactions, - "create_issue_comment_reaction_v1": SourceCodeManager.create_issue_comment_reaction, - "delete_issue_comment_reaction_v1": SourceCodeManager.delete_issue_comment_reaction, - "get_pull_request_comment_reactions_v1": SourceCodeManager.get_pull_request_comment_reactions, - "create_pull_request_comment_reaction_v1": SourceCodeManager.create_pull_request_comment_reaction, - "delete_pull_request_comment_reaction_v1": SourceCodeManager.delete_pull_request_comment_reaction, - "get_issue_reactions_v1": SourceCodeManager.get_issue_reactions, - "create_issue_reaction_v1": SourceCodeManager.create_issue_reaction, - "delete_issue_reaction_v1": SourceCodeManager.delete_issue_reaction, - "get_pull_request_reactions_v1": SourceCodeManager.get_pull_request_reactions, - 
"create_pull_request_reaction_v1": SourceCodeManager.create_pull_request_reaction, - "delete_pull_request_reaction_v1": SourceCodeManager.delete_pull_request_reaction, - "get_branch_v1": SourceCodeManager.get_branch, - "create_branch_v1": SourceCodeManager.create_branch, - "update_branch_v1": SourceCodeManager.update_branch, - "create_git_blob_v1": SourceCodeManager.create_git_blob, - "get_file_content_v1": SourceCodeManager.get_file_content, - "get_commit_v1": SourceCodeManager.get_commit, - "get_commits_v1": SourceCodeManager.get_commits, - "get_commits_by_path_v1": SourceCodeManager.get_commits_by_path, - "compare_commits_v1": SourceCodeManager.compare_commits, - "get_tree_v1": SourceCodeManager.get_tree, - "get_git_commit_v1": SourceCodeManager.get_git_commit, - "create_git_tree_v1": SourceCodeManager.create_git_tree, - "create_git_commit_v1": SourceCodeManager.create_git_commit, - "get_pull_request_files_v1": SourceCodeManager.get_pull_request_files, - "get_pull_request_commits_v1": SourceCodeManager.get_pull_request_commits, - "get_pull_request_diff_v1": SourceCodeManager.get_pull_request_diff, - "get_pull_requests_v1": SourceCodeManager.get_pull_requests, - "create_pull_request_v1": SourceCodeManager.create_pull_request, - "create_pull_request_draft_v1": SourceCodeManager.create_pull_request_draft, - "update_pull_request_v1": SourceCodeManager.update_pull_request, - "request_review_v1": SourceCodeManager.request_review, - "create_review_comment_file_v1": SourceCodeManager.create_review_comment_file, - "create_review_comment_reply_v1": SourceCodeManager.create_review_comment_reply, - "create_review_v1": SourceCodeManager.create_review, - "create_check_run_v1": SourceCodeManager.create_check_run, - "get_check_run_v1": SourceCodeManager.get_check_run, - "update_check_run_v1": SourceCodeManager.update_check_run, - "minimize_comment_v1": SourceCodeManager.minimize_comment, - "get_archive_link_v1": SourceCodeManager.get_archive_link, + "compare_commits_v1": 
compare_commits, + "create_branch_v1": create_branch, + "create_check_run_v1": create_check_run, + "create_git_blob_v1": create_git_blob, + "create_git_commit_v1": create_git_commit, + "create_git_tree_v1": create_git_tree, + "create_issue_comment_reaction_v1": create_issue_comment_reaction, + "create_issue_comment_v1": create_issue_comment, + "create_issue_reaction_v1": create_issue_reaction, + "create_pull_request_comment_reaction_v1": create_pull_request_comment_reaction, + "create_pull_request_comment_v1": create_pull_request_comment, + "create_pull_request_draft_v1": create_pull_request_draft, + "create_pull_request_reaction_v1": create_pull_request_reaction, + "create_pull_request_v1": create_pull_request, + "create_review_comment_file_v1": create_review_comment_file, + "create_review_comment_reply_v1": create_review_comment_reply, + "create_review_v1": create_review, + "delete_issue_comment_reaction_v1": delete_issue_comment_reaction, + "delete_issue_comment_v1": delete_issue_comment, + "delete_issue_reaction_v1": delete_issue_reaction, + "delete_pull_request_comment_reaction_v1": delete_pull_request_comment_reaction, + "delete_pull_request_comment_v1": delete_pull_request_comment, + "delete_pull_request_reaction_v1": delete_pull_request_reaction, + "get_branch_v1": get_branch, + "get_check_run_v1": get_check_run, + "get_commit_v1": get_commit, + "get_commits_by_path_v1": get_commits_by_path, + "get_commits_v1": get_commits, + "get_file_content_v1": get_file_content, + "get_git_commit_v1": get_git_commit, + "get_issue_comment_reactions_v1": get_issue_comment_reactions, + "get_issue_comments_v1": get_issue_comments, + "get_issue_reactions_v1": get_issue_reactions, + "get_pull_request_comment_reactions_v1": get_pull_request_comment_reactions, + "get_pull_request_comments_v1": get_pull_request_comments, + "get_pull_request_commits_v1": get_pull_request_commits, + "get_pull_request_diff_v1": get_pull_request_diff, + "get_pull_request_files_v1": 
get_pull_request_files, + "get_pull_request_reactions_v1": get_pull_request_reactions, + "get_pull_request_v1": get_pull_request, + "get_pull_requests_v1": get_pull_requests, + "get_tree_v1": get_tree, + "minimize_comment_v1": minimize_comment, + "request_review_v1": request_review, + "update_branch_v1": update_branch, + "update_check_run_v1": update_check_run, + "update_pull_request_v1": update_pull_request, + "get_archive_link_v1": get_archive_link, } diff --git a/tests/sentry/scm/endpoints/test_scm_rpc.py b/tests/sentry/scm/endpoints/test_scm_rpc.py index 20a90542881517..de49ba5470d788 100644 --- a/tests/sentry/scm/endpoints/test_scm_rpc.py +++ b/tests/sentry/scm/endpoints/test_scm_rpc.py @@ -50,6 +50,15 @@ def raise_scm_error(scm: SourceCodeManager) -> dict[str, str]: yield +@contextlib.contextmanager +def add_call_missing_provider_method(): + def call_missing_provider_method(scm: SourceCodeManager) -> None: + scm.this_method_does_not_exist() # type: ignore[attr-defined] + + with add_method("call_missing_provider_method", call_missing_provider_method): + yield + + @override_settings(SCM_RPC_SHARED_SECRET=["a-long-value-that-is-hard-to-guess"]) class TestScmRpc(APITestCase): def setUp(self) -> None: @@ -583,6 +592,24 @@ def test_scm_coded_error_in_provider_method(self) -> None: ] } + def test_attribute_error_in_provider_method_is_treated_as_provider_not_supported(self) -> None: + with add_call_missing_provider_method(): + response = self.call( + "call_missing_provider_method", + {"args": {"organization_id": self.organization.id, "repository_id": self.repo.id}}, + ) + + assert response.status_code == 400 + assert response.json() == { + "errors": [ + { + "status": "400", + "title": "Provider not supported.", + "detail": "call_missing_provider_method is not supported by service-provider GitHubProvider", + } + ] + } + def test_scm_provider_exception_in_provider_method(self) -> None: with add_raise_scm_error(SCMProviderException("Blah", 68)): response = self.call( 
diff --git a/tests/sentry/scm/unit/test_scm_actions.py b/tests/sentry/scm/unit/test_scm_actions.py index f86d97b8e026ed..5011c8c3cc5aaa 100644 --- a/tests/sentry/scm/unit/test_scm_actions.py +++ b/tests/sentry/scm/unit/test_scm_actions.py @@ -4,13 +4,62 @@ import pytest -from sentry.scm.actions import SourceCodeManager +from sentry.scm.actions import ( + SourceCodeManager, + compare_commits, + create_branch, + create_check_run, + create_git_blob, + create_git_commit, + create_git_tree, + create_issue_comment, + create_issue_comment_reaction, + create_issue_reaction, + create_pull_request, + create_pull_request_comment, + create_pull_request_comment_reaction, + create_pull_request_draft, + create_pull_request_reaction, + create_review, + create_review_comment_file, + create_review_comment_reply, + delete_issue_comment, + delete_issue_comment_reaction, + delete_issue_reaction, + delete_pull_request_comment, + delete_pull_request_comment_reaction, + delete_pull_request_reaction, + get_branch, + get_check_run, + get_commit, + get_commits, + get_commits_by_path, + get_file_content, + get_git_commit, + get_issue_comment_reactions, + get_issue_comments, + get_issue_reactions, + get_pull_request, + get_pull_request_comment_reactions, + get_pull_request_comments, + get_pull_request_commits, + get_pull_request_diff, + get_pull_request_files, + get_pull_request_reactions, + get_pull_requests, + get_tree, + minimize_comment, + request_review, + update_branch, + update_check_run, + update_pull_request, +) from sentry.scm.errors import ( SCMCodedError, SCMProviderException, - SCMProviderNotSupported, SCMUnhandledException, ) +from sentry.scm.private.provider import GetBranchProtocol, GetIssueReactionsProtocol from sentry.scm.types import PaginatedActionResult, ReactionResult, Referrer, Repository from tests.sentry.scm.test_fixtures import BaseTestProvider @@ -32,69 +81,69 @@ def fetch_repository(oid, rid) -> Repository: } -ALL_ACTIONS: tuple[tuple[str, dict[str, Any]], ...] 
= ( +ALL_ACTIONS: tuple[tuple[Callable[..., Any], dict[str, Any]], ...] = ( # Issue comments - ("get_issue_comments", {"issue_id": "1"}), - ("create_issue_comment", {"issue_id": "1", "body": "test"}), - ("delete_issue_comment", {"issue_id": "1", "comment_id": "1"}), + (get_issue_comments, {"issue_id": "1"}), + (create_issue_comment, {"issue_id": "1", "body": "test"}), + (delete_issue_comment, {"issue_id": "1", "comment_id": "1"}), # Pull request - ("get_pull_request", {"pull_request_id": "1"}), + (get_pull_request, {"pull_request_id": "1"}), # Pull request comments - ("get_pull_request_comments", {"pull_request_id": "1"}), - ("create_pull_request_comment", {"pull_request_id": "1", "body": "test"}), - ("delete_pull_request_comment", {"pull_request_id": "1", "comment_id": "1"}), + (get_pull_request_comments, {"pull_request_id": "1"}), + (create_pull_request_comment, {"pull_request_id": "1", "body": "test"}), + (delete_pull_request_comment, {"pull_request_id": "1", "comment_id": "1"}), # Issue comment reactions - ("get_issue_comment_reactions", {"issue_id": "1", "comment_id": "1"}), - ("create_issue_comment_reaction", {"issue_id": "1", "comment_id": "1", "reaction": "eyes"}), - ("delete_issue_comment_reaction", {"issue_id": "1", "comment_id": "1", "reaction_id": "123"}), + (get_issue_comment_reactions, {"issue_id": "1", "comment_id": "1"}), + (create_issue_comment_reaction, {"issue_id": "1", "comment_id": "1", "reaction": "eyes"}), + (delete_issue_comment_reaction, {"issue_id": "1", "comment_id": "1", "reaction_id": "123"}), # Pull request comment reactions - ("get_pull_request_comment_reactions", {"pull_request_id": "1", "comment_id": "1"}), + (get_pull_request_comment_reactions, {"pull_request_id": "1", "comment_id": "1"}), ( - "create_pull_request_comment_reaction", + create_pull_request_comment_reaction, {"pull_request_id": "1", "comment_id": "1", "reaction": "eyes"}, ), ( - "delete_pull_request_comment_reaction", + delete_pull_request_comment_reaction, 
{"pull_request_id": "1", "comment_id": "1", "reaction_id": "123"}, ), # Issue reactions - ("get_issue_reactions", {"issue_id": "1"}), - ("create_issue_reaction", {"issue_id": "1", "reaction": "eyes"}), - ("delete_issue_reaction", {"issue_id": "1", "reaction_id": "456"}), + (get_issue_reactions, {"issue_id": "1"}), + (create_issue_reaction, {"issue_id": "1", "reaction": "eyes"}), + (delete_issue_reaction, {"issue_id": "1", "reaction_id": "456"}), # Pull request reactions - ("get_pull_request_reactions", {"pull_request_id": "1"}), - ("create_pull_request_reaction", {"pull_request_id": "1", "reaction": "eyes"}), - ("delete_pull_request_reaction", {"pull_request_id": "1", "reaction_id": "456"}), + (get_pull_request_reactions, {"pull_request_id": "1"}), + (create_pull_request_reaction, {"pull_request_id": "1", "reaction": "eyes"}), + (delete_pull_request_reaction, {"pull_request_id": "1", "reaction_id": "456"}), # Branch operations - ("get_branch", {"branch": "main"}), - ("create_branch", {"branch": "feature", "sha": "abc123"}), - ("update_branch", {"branch": "feature", "sha": "def456"}), + (get_branch, {"branch": "main"}), + (create_branch, {"branch": "feature", "sha": "abc123"}), + (update_branch, {"branch": "feature", "sha": "def456"}), # Git blob operations - ("create_git_blob", {"content": "hello", "encoding": "utf-8"}), + (create_git_blob, {"content": "hello", "encoding": "utf-8"}), # File content operations - ("get_file_content", {"path": "README.md"}), + (get_file_content, {"path": "README.md"}), # Commit operations - ("get_commit", {"sha": "abc123"}), - ("get_commits", {}), - ("get_commits_by_path", {"path": "src/main.py"}), - ("compare_commits", {"start_sha": "aaa", "end_sha": "bbb"}), + (get_commit, {"sha": "abc123"}), + (get_commits, {}), + (get_commits_by_path, {"path": "src/main.py"}), + (compare_commits, {"start_sha": "aaa", "end_sha": "bbb"}), # Git data operations - ("get_tree", {"tree_sha": "tree123"}), - ("get_git_commit", {"sha": "abc123"}), - 
("create_git_tree", {"tree": [{"path": "f.py", "mode": "100644", "type": "blob", "sha": "x"}]}), - ("create_git_commit", {"message": "msg", "tree_sha": "t", "parent_shas": ["p"]}), + (get_tree, {"tree_sha": "tree123"}), + (get_git_commit, {"sha": "abc123"}), + (create_git_tree, {"tree": [{"path": "f.py", "mode": "100644", "type": "blob", "sha": "x"}]}), + (create_git_commit, {"message": "msg", "tree_sha": "t", "parent_shas": ["p"]}), # Expanded pull request operations - ("get_pull_request_files", {"pull_request_id": "1"}), - ("get_pull_request_commits", {"pull_request_id": "1"}), - ("get_pull_request_diff", {"pull_request_id": "1"}), - ("get_pull_requests", {}), - ("create_pull_request", {"title": "T", "body": "B", "head": "h", "base": "b"}), - ("create_pull_request_draft", {"title": "T", "body": "B", "head": "h", "base": "b"}), - ("update_pull_request", {"pull_request_id": "1"}), - ("request_review", {"pull_request_id": "1", "reviewers": ["user1"]}), + (get_pull_request_files, {"pull_request_id": "1"}), + (get_pull_request_commits, {"pull_request_id": "1"}), + (get_pull_request_diff, {"pull_request_id": "1"}), + (get_pull_requests, {}), + (create_pull_request, {"title": "T", "body": "B", "head": "h", "base": "b"}), + (create_pull_request_draft, {"title": "T", "body": "B", "head": "h", "base": "b"}), + (update_pull_request, {"pull_request_id": "1"}), + (request_review, {"pull_request_id": "1", "reviewers": ["user1"]}), # Review operations ( - "create_review_comment_file", + create_review_comment_file, { "pull_request_id": "1", "commit_id": "abc", @@ -104,7 +153,7 @@ def fetch_repository(oid, rid) -> Repository: }, ), ( - "create_review_comment_reply", + create_review_comment_reply, { "pull_request_id": "1", "body": "comment", @@ -112,7 +161,7 @@ def fetch_repository(oid, rid) -> Repository: }, ), ( - "create_review", + create_review, { "pull_request_id": "1", "commit_sha": "abc", @@ -121,16 +170,16 @@ def fetch_repository(oid, rid) -> Repository: }, ), # Check run 
operations - ("create_check_run", {"name": "check", "head_sha": "abc"}), - ("get_check_run", {"check_run_id": "300"}), - ("update_check_run", {"check_run_id": "300"}), + (create_check_run, {"name": "check", "head_sha": "abc"}), + (get_check_run, {"check_run_id": "300"}), + (update_check_run, {"check_run_id": "300"}), # GraphQL mutation operations - ("minimize_comment", {"comment_node_id": "IC_abc", "reason": "OUTDATED"}), + (minimize_comment, {"comment_node_id": "IC_abc", "reason": "OUTDATED"}), ) -@pytest.mark.parametrize(("method", "kwargs"), ALL_ACTIONS) -def test_rate_limited_action(method: str, kwargs: dict[str, Any]): +@pytest.mark.parametrize(("action", "kwargs"), ALL_ACTIONS) +def test_rate_limited_action(action: Callable[..., Any], kwargs: dict[str, Any]): class RateLimitedProvider(BaseTestProvider): def is_rate_limited(self, referrer): return True @@ -138,7 +187,7 @@ def is_rate_limited(self, referrer): scm = SourceCodeManager(RateLimitedProvider()) with raises_with_code(SCMCodedError, "rate_limit_exceeded"): - getattr(scm, method)(**kwargs) + action(scm, **kwargs) def test_repository_not_found(): @@ -419,132 +468,132 @@ def _check_update_check_run(result: Any) -> None: ACTION_TESTS: tuple[tuple[Callable[..., Any], dict[str, Any], Callable[..., Any]], ...] 
= ( - (SourceCodeManager.get_issue_comments, {"issue_id": "1"}, _check_issue_comments), + (get_issue_comments, {"issue_id": "1"}, _check_issue_comments), ( - SourceCodeManager.create_issue_comment, + create_issue_comment, {"issue_id": "1", "body": "test"}, _check_created_comment, ), - (SourceCodeManager.delete_issue_comment, {"issue_id": "1", "comment_id": "1"}, _check_none), - (SourceCodeManager.get_pull_request, {"pull_request_id": "1"}, _check_pull_request), + (delete_issue_comment, {"issue_id": "1", "comment_id": "1"}, _check_none), + (get_pull_request, {"pull_request_id": "1"}, _check_pull_request), ( - SourceCodeManager.get_pull_request_comments, + get_pull_request_comments, {"pull_request_id": "1"}, _check_pull_request_comments, ), ( - SourceCodeManager.create_pull_request_comment, + create_pull_request_comment, {"pull_request_id": "1", "body": "test"}, _check_created_pr_comment, ), ( - SourceCodeManager.delete_pull_request_comment, + delete_pull_request_comment, {"pull_request_id": "1", "comment_id": "1"}, _check_none, ), ( - SourceCodeManager.get_issue_comment_reactions, + get_issue_comment_reactions, {"issue_id": "1", "comment_id": "1"}, _check_comment_reactions, ), ( - SourceCodeManager.create_issue_comment_reaction, + create_issue_comment_reaction, {"issue_id": "1", "comment_id": "1", "reaction": "eyes"}, _check_created_reaction, ), ( - SourceCodeManager.delete_issue_comment_reaction, + delete_issue_comment_reaction, {"issue_id": "1", "comment_id": "1", "reaction_id": "123"}, _check_none, ), ( - SourceCodeManager.get_pull_request_comment_reactions, + get_pull_request_comment_reactions, {"pull_request_id": "1", "comment_id": "1"}, _check_pr_comment_reactions, ), ( - SourceCodeManager.create_pull_request_comment_reaction, + create_pull_request_comment_reaction, {"pull_request_id": "1", "comment_id": "1", "reaction": "eyes"}, _check_created_reaction, ), ( - SourceCodeManager.delete_pull_request_comment_reaction, + delete_pull_request_comment_reaction, 
{"pull_request_id": "1", "comment_id": "1", "reaction_id": "123"}, _check_none, ), - (SourceCodeManager.get_issue_reactions, {"issue_id": "1"}, _check_issue_reactions), + (get_issue_reactions, {"issue_id": "1"}, _check_issue_reactions), ( - SourceCodeManager.create_issue_reaction, + create_issue_reaction, {"issue_id": "1", "reaction": "eyes"}, _check_created_reaction, ), - (SourceCodeManager.delete_issue_reaction, {"issue_id": "1", "reaction_id": "456"}, _check_none), + (delete_issue_reaction, {"issue_id": "1", "reaction_id": "456"}, _check_none), ( - SourceCodeManager.get_pull_request_reactions, + get_pull_request_reactions, {"pull_request_id": "1"}, _check_pr_reactions, ), ( - SourceCodeManager.create_pull_request_reaction, + create_pull_request_reaction, {"pull_request_id": "1", "reaction": "eyes"}, _check_created_reaction, ), ( - SourceCodeManager.delete_pull_request_reaction, + delete_pull_request_reaction, {"pull_request_id": "1", "reaction_id": "456"}, _check_none, ), - (SourceCodeManager.get_branch, {"branch": "main"}, _check_get_branch), - (SourceCodeManager.create_branch, {"branch": "feature", "sha": "abc123"}, _check_create_branch), - (SourceCodeManager.update_branch, {"branch": "feature", "sha": "def456"}, _check_update_branch), + (get_branch, {"branch": "main"}, _check_get_branch), + (create_branch, {"branch": "feature", "sha": "abc123"}, _check_create_branch), + (update_branch, {"branch": "feature", "sha": "def456"}, _check_update_branch), ( - SourceCodeManager.create_git_blob, + create_git_blob, {"content": "hello", "encoding": "utf-8"}, _check_create_git_blob, ), - (SourceCodeManager.get_file_content, {"path": "README.md"}, _check_file_content), - (SourceCodeManager.get_commit, {"sha": "abc123"}, _check_get_commit), - (SourceCodeManager.get_commits, {}, _check_get_commits), - (SourceCodeManager.get_commits_by_path, {"path": "src/main.py"}, _check_get_commits), + (get_file_content, {"path": "README.md"}, _check_file_content), + (get_commit, {"sha": 
"abc123"}, _check_get_commit), + (get_commits, {}, _check_get_commits), + (get_commits_by_path, {"path": "src/main.py"}, _check_get_commits), ( - SourceCodeManager.compare_commits, + compare_commits, {"start_sha": "aaa", "end_sha": "bbb"}, _check_compare_commits, ), - (SourceCodeManager.get_tree, {"tree_sha": "tree123"}, _check_get_tree), - (SourceCodeManager.get_git_commit, {"sha": "abc123"}, _check_get_git_commit), + (get_tree, {"tree_sha": "tree123"}, _check_get_tree), + (get_git_commit, {"sha": "abc123"}, _check_get_git_commit), ( - SourceCodeManager.create_git_tree, + create_git_tree, {"tree": [{"path": "f.py", "mode": "100644", "type": "blob", "sha": "x"}]}, _check_create_git_tree, ), ( - SourceCodeManager.create_git_commit, + create_git_commit, {"message": "msg", "tree_sha": "t", "parent_shas": ["p"]}, _check_create_git_commit, ), - (SourceCodeManager.get_pull_request_files, {"pull_request_id": "1"}, _check_pr_files), - (SourceCodeManager.get_pull_request_commits, {"pull_request_id": "1"}, _check_pr_commits), - (SourceCodeManager.get_pull_request_diff, {"pull_request_id": "1"}, _check_pr_diff), - (SourceCodeManager.get_pull_requests, {}, _check_list_pull_requests), + (get_pull_request_files, {"pull_request_id": "1"}, _check_pr_files), + (get_pull_request_commits, {"pull_request_id": "1"}, _check_pr_commits), + (get_pull_request_diff, {"pull_request_id": "1"}, _check_pr_diff), + (get_pull_requests, {}, _check_list_pull_requests), ( - SourceCodeManager.create_pull_request, + create_pull_request, {"title": "T", "body": "B", "head": "h", "base": "b"}, _check_create_pull_request, ), ( - SourceCodeManager.create_pull_request_draft, + create_pull_request_draft, {"title": "T", "body": "B", "head": "h", "base": "b"}, _check_create_pull_request, ), - (SourceCodeManager.update_pull_request, {"pull_request_id": "1"}, _check_update_pull_request), + (update_pull_request, {"pull_request_id": "1"}, _check_update_pull_request), ( - SourceCodeManager.request_review, + 
request_review, {"pull_request_id": "1", "reviewers": ["user1"]}, _check_none, ), ( - SourceCodeManager.create_review_comment_file, + create_review_comment_file, { "pull_request_id": "1", "commit_id": "abc", @@ -555,7 +604,7 @@ def _check_update_check_run(result: Any) -> None: _check_review_comment, ), ( - SourceCodeManager.create_review_comment_reply, + create_review_comment_reply, { "pull_request_id": "1", "body": "comment", @@ -564,7 +613,7 @@ def _check_update_check_run(result: Any) -> None: _check_review_comment, ), ( - SourceCodeManager.create_review, + create_review, { "pull_request_id": "1", "commit_sha": "abc", @@ -574,22 +623,22 @@ def _check_update_check_run(result: Any) -> None: _check_review, ), ( - SourceCodeManager.create_check_run, + create_check_run, {"name": "check", "head_sha": "abc"}, _check_create_check_run, ), ( - SourceCodeManager.get_check_run, + get_check_run, {"check_run_id": "300"}, _check_get_check_run, ), ( - SourceCodeManager.update_check_run, + update_check_run, {"check_run_id": "300"}, _check_update_check_run, ), ( - SourceCodeManager.minimize_comment, + minimize_comment, {"comment_node_id": "IC_abc", "reason": "OUTDATED"}, _check_none, ), @@ -624,6 +673,7 @@ def get_issue_reactions( scm = SourceCodeManager(FailingProvider()) with pytest.raises(SCMProviderException): + assert isinstance(scm, GetIssueReactionsProtocol) scm.get_issue_reactions(issue_id="1") @@ -643,13 +693,16 @@ def is_rate_limited(self, referrer: Referrer) -> bool: return False -@pytest.mark.parametrize(("method", "kwargs"), ALL_ACTIONS) -def test_exec_raises_provider_not_supported_for_all_actions(method: str, kwargs: dict[str, Any]): +@pytest.mark.parametrize(("action", "kwargs"), ALL_ACTIONS) +def test_exec_raises_provider_not_supported_for_all_actions( + action: Callable[..., Any], + kwargs: dict[str, Any], +): """Every SCM action raises SCMProviderNotSupported when the provider lacks the protocol.""" scm = SourceCodeManager(MinimalProvider()) - with 
pytest.raises(SCMProviderNotSupported): - getattr(scm, method)(**kwargs) + with pytest.raises(AttributeError): + action(scm, **kwargs) def test_exec_wraps_unhandled_exception(): @@ -662,6 +715,7 @@ def get_branch(self, branch, request_options=None): scm = SourceCodeManager(ExplodingProvider()) with pytest.raises(SCMUnhandledException): + assert isinstance(scm, GetBranchProtocol) scm.get_branch(branch="main") @@ -678,6 +732,7 @@ def get_branch(self, branch, request_options=None): ) with pytest.raises(SCMUnhandledException): + assert isinstance(scm, GetBranchProtocol) scm.get_branch(branch="main") assert metrics == [("sentry.scm.actions.failed", 1, {})] @@ -692,6 +747,7 @@ def test_exec_passes_custom_referrer(): referrer="autofix", record_count=lambda k, a, t: metrics.append((k, a, t)), ) + assert isinstance(scm, GetBranchProtocol) scm.get_branch(branch="main") referrer_metrics = [(k, a, t) for k, a, t in metrics if "referrer" in t] @@ -700,71 +756,6 @@ def test_exec_passes_custom_referrer(): ] -class TestCan: - """Tests for SourceCodeManager.can().""" - - def test_can_returns_true_for_all_known_actions_with_full_provider(self): - """A provider implementing every protocol satisfies every action.""" - scm = SourceCodeManager(BaseTestProvider()) - actions = [name for name, _kwargs in ALL_ACTIONS] - result = scm.can(actions) - assert all(result.values()) - assert set(result.keys()) == set(actions) - - def test_can_returns_false_when_provider_lacks_protocol(self): - """A minimal provider that implements no action protocols fails every action.""" - scm = SourceCodeManager(MinimalProvider()) - for action_name, _ in ALL_ACTIONS: - result = scm.can([action_name]) - assert result[action_name] is False, f"Expected {action_name!r} to be False" - - def test_can_returns_false_for_unknown_action(self): - """An action name not in ActionMap causes can() to return False.""" - scm = SourceCodeManager(BaseTestProvider()) - result = scm.can(["nonexistent_action"]) - assert result == 
{"nonexistent_action": False} - - def test_can_returns_empty_dict_for_empty_list(self): - """An empty action list returns an empty dict.""" - scm = SourceCodeManager(MinimalProvider()) - assert scm.can([]) == {} - - def test_can_mixed_supported_and_unsupported(self): - """Returns a mix of True and False for supported and unsupported actions.""" - scm = SourceCodeManager(BaseTestProvider()) - result = scm.can(["get_branch", "nonexistent_action"]) - assert result == {"get_branch": True, "nonexistent_action": False} - - def test_can_with_partial_provider(self): - """A provider implementing only branch protocols passes branch checks but not others.""" - - class BranchOnlyProvider: - organization_id: int = 1 - repository: Repository = { - "integration_id": 1, - "name": "test", - "organization_id": 1, - "is_active": True, - "external_id": None, - } - - def is_rate_limited(self, referrer: Referrer) -> bool: - return False - - def get_branch(self, branch, request_options=None): - pass - - def create_branch(self, branch, sha): - pass - - scm = SourceCodeManager(BranchOnlyProvider()) - result = scm.can(["get_branch", "create_branch"]) - assert result == {"get_branch": True, "create_branch": True} - - result = scm.can(["get_branch", "get_commit"]) - assert result == {"get_branch": True, "get_commit": False} - - def test_exec_passes_custom_record_count(): """A custom record_count callable provided at construction is used by _exec.""" calls: list[tuple[str, int, dict[str, str]]] = [] @@ -773,6 +764,7 @@ def custom_record(key: str, amount: int, tags: dict[str, str]) -> None: calls.append((key, amount, tags)) scm = SourceCodeManager(BaseTestProvider(), record_count=custom_record) + assert isinstance(scm, GetBranchProtocol) scm.get_branch(branch="main") assert len(calls) == 2 From 537ef288d3b8d284fb91cea69d79e5c1af7f38ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vjeran=20Grozdani=C4=87?= Date: Mon, 23 Mar 2026 14:51:32 +0100 Subject: [PATCH 04/16] ref(db): Remove stale entries from 
create_or_update allowlist (#111287) - Remove `project_template_detail.py` and `project_template_option.py` from the `create_or_update` allowlist in `test_no_create_or_update_usage.py` - These files no longer exist in the codebase Part of the ongoing effort to deprecate and remove the legacy `create_or_update` in favor of Django's `update_or_create`. --- tests/sentry/test_no_create_or_update_usage.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/sentry/test_no_create_or_update_usage.py b/tests/sentry/test_no_create_or_update_usage.py index 08a83b969439d9..9cae04c25f9d2e 100644 --- a/tests/sentry/test_no_create_or_update_usage.py +++ b/tests/sentry/test_no_create_or_update_usage.py @@ -18,7 +18,6 @@ "src/sentry/issues/endpoints/organization_group_search_view_visit.py", "src/sentry/api/helpers/group_index/update.py", "src/sentry/api/endpoints/organization_pinned_searches.py", - "src/sentry/api/endpoints/project_template_detail.py", "src/sentry/releases/endpoints/release_deploys.py", "src/sentry/api/endpoints/organization_unsubscribe.py", "src/sentry/api/endpoints/organization_recent_searches.py", @@ -31,7 +30,6 @@ "src/sentry/models/release.py", "src/sentry/models/releases/set_commits.py", "src/sentry/models/options/organization_option.py", - "src/sentry/models/options/project_template_option.py", "src/sentry/audit_log/services/log/impl.py", "src/sentry/utils/mockdata/core.py", "src/sentry/core/endpoints/organization_details.py", From d129221c19c3d0d8fa54913b0141654d43465519 Mon Sep 17 00:00:00 2001 From: Alex Sohn <44201357+alexsohn1126@users.noreply.github.com> Date: Mon, 23 Mar 2026 09:58:34 -0400 Subject: [PATCH 05/16] feat(seer): Add SeerOperatorExplorerCache for completion hook payloads (#109238) ## Summary - Adds `SeerOperatorExplorerCache` class to `cache.py`, keyed by `run_id` with 1-hour TTL - Simpler than the Autofix cache (no pre/post migration) since Explorer receives a `run_id` directly from the trigger call - Adds 
`OPERATOR_CACHE_SET_EXPLORER` and `OPERATOR_CACHE_GET_EXPLORER` metric interaction types - Adds tests for cache key format, set, get, and cache miss ISWF-2024 --------- Co-authored-by: Leander Rodrigues Co-authored-by: Claude --- src/sentry/seer/entrypoints/cache.py | 53 +++++++++++++++++-- src/sentry/seer/entrypoints/metrics.py | 2 + tests/sentry/seer/entrypoints/test_cache.py | 57 ++++++++++++++++++++- 3 files changed, 108 insertions(+), 4 deletions(-) diff --git a/src/sentry/seer/entrypoints/cache.py b/src/sentry/seer/entrypoints/cache.py index 8a8ba6684b3933..5dd3e86131408b 100644 --- a/src/sentry/seer/entrypoints/cache.py +++ b/src/sentry/seer/entrypoints/cache.py @@ -1,4 +1,5 @@ import logging +from enum import StrEnum from sentry.seer.entrypoints.metrics import ( SeerOperatorEventLifecycleMetric, @@ -10,7 +11,15 @@ logger = logging.getLogger(__name__) + +class CacheHaltReason(StrEnum): + CACHE_MISS = "cache_miss" + POST_CACHE_EXISTS = "post_cache_exists" + NO_PRE_CACHE = "no_pre_cache" + + AUTOFIX_CACHE_TIMEOUT_SECONDS = 60 * 60 * 12 # 12 hours +EXPLORER_CACHE_TIMEOUT_SECONDS = 60 * 60 # 1 hour class SeerOperatorAutofixCache[CachePayloadT]: @@ -130,7 +139,7 @@ def get( ) if not cache_result: - lifecycle.record_halt(halt_reason="cache_miss") + lifecycle.record_halt(halt_reason=CacheHaltReason.CACHE_MISS) return None # If we do have a run_id cache, we can delete the pre-autofix cache to prevent autofix @@ -168,11 +177,11 @@ def migrate( ) # If we already have a post-autofix cache, and we're not overwriting, skip. if not overwrite and post_cache_result: - lifecycle.record_halt(halt_reason="post_cache_exists") + lifecycle.record_halt(halt_reason=CacheHaltReason.POST_CACHE_EXISTS) continue # If we don't have a pre-autofix cache, nothing to migrate, skip. 
if not pre_cache_result: - lifecycle.record_halt(halt_reason="no_pre_cache") + lifecycle.record_halt(halt_reason=CacheHaltReason.NO_PRE_CACHE) continue post_cache_key = cls._get_post_autofix_cache_key( entrypoint_key=entrypoint_key, run_id=to_run_id @@ -186,3 +195,41 @@ def migrate( lifecycle.add_extras( {"from_key": pre_cache_result["key"], "to_key": post_cache_key} ) + + +class SeerOperatorExplorerCache[CachePayloadT]: + """ + Cache for Explorer completion hook payloads, keyed only by run_id. + + Unlike the Autofix cache which needs pre/post migration (group_id -> run_id), + Explorer receives a run_id directly from the trigger call, so this cache is + simpler: set on trigger, get on completion hook. + """ + + @classmethod + def _get_cache_key(cls, *, entrypoint_key: str, run_id: int) -> str: + return f"seer:explorer:{entrypoint_key}:{run_id}" + + @classmethod + def set(cls, *, entrypoint_key: str, run_id: int, cache_payload: CachePayloadT) -> None: + with SeerOperatorEventLifecycleMetric( + interaction_type=SeerOperatorInteractionType.OPERATOR_CACHE_SET_EXPLORER, + entrypoint_key=entrypoint_key, + ).capture() as lifecycle: + cache_key = cls._get_cache_key(entrypoint_key=entrypoint_key, run_id=run_id) + lifecycle.add_extras({"run_id": run_id, "cache_key": cache_key}) + cache.set(cache_key, cache_payload, timeout=EXPLORER_CACHE_TIMEOUT_SECONDS) + + @classmethod + def get(cls, *, entrypoint_key: str, run_id: int) -> CachePayloadT | None: + with SeerOperatorEventLifecycleMetric( + interaction_type=SeerOperatorInteractionType.OPERATOR_CACHE_GET_EXPLORER, + entrypoint_key=entrypoint_key, + ).capture() as lifecycle: + cache_key = cls._get_cache_key(entrypoint_key=entrypoint_key, run_id=run_id) + lifecycle.add_extras({"run_id": run_id, "cache_key": cache_key}) + cache_payload = cache.get(cache_key) + if not cache_payload: + lifecycle.record_halt(halt_reason=CacheHaltReason.CACHE_MISS) + return None + return cache_payload diff --git 
a/src/sentry/seer/entrypoints/metrics.py b/src/sentry/seer/entrypoints/metrics.py index 6c6e25dd869bc1..f993fa8f60cb91 100644 --- a/src/sentry/seer/entrypoints/metrics.py +++ b/src/sentry/seer/entrypoints/metrics.py @@ -19,6 +19,8 @@ class SeerOperatorInteractionType(StrEnum): ENTRYPOINT_ON_TRIGGER_AUTOFIX_ALREADY_EXISTS = "entrypoint_on_trigger_autofix_already_exists" ENTRYPOINT_CREATE_AUTOFIX_CACHE_PAYLOAD = "entrypoint_create_autofix_cache_payload" ENTRYPOINT_ON_AUTOFIX_UPDATE = "entrypoint_on_autofix_update" + OPERATOR_CACHE_SET_EXPLORER = "cache_set_explorer" + OPERATOR_CACHE_GET_EXPLORER = "cache_get_explorer" @dataclass diff --git a/tests/sentry/seer/entrypoints/test_cache.py b/tests/sentry/seer/entrypoints/test_cache.py index 17547ec194be6d..9988630f10f7fb 100644 --- a/tests/sentry/seer/entrypoints/test_cache.py +++ b/tests/sentry/seer/entrypoints/test_cache.py @@ -2,7 +2,13 @@ from unittest.mock import patch from fixtures.seer.webhooks import MOCK_GROUP_ID, MOCK_RUN_ID -from sentry.seer.entrypoints.cache import AUTOFIX_CACHE_TIMEOUT_SECONDS, SeerOperatorAutofixCache +from sentry.seer.entrypoints.cache import ( + AUTOFIX_CACHE_TIMEOUT_SECONDS, + EXPLORER_CACHE_TIMEOUT_SECONDS, + CacheHaltReason, + SeerOperatorAutofixCache, + SeerOperatorExplorerCache, +) from sentry.seer.entrypoints.types import SeerEntrypointKey from sentry.testutils.cases import TestCase @@ -219,3 +225,52 @@ def test_migrate_overwrite(self, mock_cache): timeout=AUTOFIX_CACHE_TIMEOUT_SECONDS, ) mock_cache.delete.assert_called_once_with(self.pre_cache_key) + + +class SeerOperatorExplorerCacheTest(TestCase): + def setUp(self): + self.entrypoint_key = str(SeerEntrypointKey.SLACK) + self.cache_key = SeerOperatorExplorerCache._get_cache_key( + entrypoint_key=self.entrypoint_key, run_id=MOCK_RUN_ID + ) + + def test_get_cache_key(self): + assert self.cache_key == f"seer:explorer:{self.entrypoint_key}:{MOCK_RUN_ID}" + + @patch("sentry.seer.entrypoints.cache.cache.set") + def test_set(self, 
mock_cache_set): + payload = MockCachePayload(thread_id="explorer_payload") + SeerOperatorExplorerCache.set( + entrypoint_key=self.entrypoint_key, + run_id=MOCK_RUN_ID, + cache_payload=payload, + ) + mock_cache_set.assert_called_once_with( + self.cache_key, + payload, + timeout=EXPLORER_CACHE_TIMEOUT_SECONDS, + ) + + @patch("sentry.seer.entrypoints.cache.cache.get") + def test_get(self, mock_cache_get): + payload = MockCachePayload(thread_id="explorer_payload") + mock_cache_get.return_value = payload + + result = SeerOperatorExplorerCache.get( + entrypoint_key=self.entrypoint_key, run_id=MOCK_RUN_ID + ) + + mock_cache_get.assert_called_once_with(self.cache_key) + assert result == payload + + @patch("sentry.integrations.utils.metrics.EventLifecycle.record_halt") + @patch("sentry.seer.entrypoints.cache.cache.get") + def test_get_cache_miss(self, mock_cache_get, mock_record_halt): + mock_cache_get.return_value = None + + result = SeerOperatorExplorerCache.get( + entrypoint_key=self.entrypoint_key, run_id=MOCK_RUN_ID + ) + + assert result is None + mock_record_halt.assert_called_once_with(halt_reason=CacheHaltReason.CACHE_MISS) From 2801a91b766aaadb816ac5d1c7919b3603931009 Mon Sep 17 00:00:00 2001 From: Shashank Jarmale Date: Mon, 23 Mar 2026 10:04:10 -0400 Subject: [PATCH 06/16] feat(occurrences on eap): Implement errors EAP query for organization events tracing (#111093) Implements double reads of occurrences from EAP for the errors query in `query_trace_data` in `src/sentry/api/endpoints/organization_events_trace.py`. 
--- .../endpoints/organization_events_trace.py | 71 ++++++++++++++++ .../test_organization_events_trace.py | 80 +++++++++++++++++++ 2 files changed, 151 insertions(+) diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index 9f5e4baa23a8f9..e13ff81b50b804 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -768,6 +768,77 @@ def query_trace_data( for result, query in zip(results, [transaction_query, error_query, occurrence_query]) ] + errors_callsite = "api.trace.query_trace_data.errors" + if EAPOccurrencesComparator.should_check_experiment(errors_callsite): + try: + eap_error_result = Occurrences.run_table_query( + params=snuba_params, + query_string=f"trace:{trace_id}", + selected_columns=[ + "id", + "project_id", + "project.name", + "timestamp", + "span_id", + "transaction", + "group_id", + "title", + "message", + "level", + ], + orderby=["id"], + offset=0, + limit=limit, + referrer=Referrer.API_TRACE_VIEW_GET_EVENTS.value, + config=SearchResolverConfig(), + occurrence_category=OccurrenceCategory.ERROR, + ) + eap_errors = [ + { + "id": row.get("id", ""), + "project": row.get("project.name", ""), + "project.id": row.get("project_id"), + "timestamp": row.get("timestamp", ""), + "timestamp_ms": row.get("timestamp", ""), + "trace.span": row.get("span_id", ""), + "transaction": row.get("transaction", ""), + "issue": row.get("group_id"), + "issue.id": row.get("group_id"), + "title": row.get("title", ""), + "message": row.get("message", ""), + "tags[level]": row.get("level", ""), + } + for row in eap_error_result.get("data", []) + ] + except Exception: + logger.exception( + "Fetching error occurrences for trace from EAP failed in query_trace_data", + extra={ + "trace_id": trace_id, + "organization_id": ( + snuba_params.organization.id if snuba_params.organization else None + ), + }, + ) + eap_errors = [] + + 
transformed_results[1] = EAPOccurrencesComparator.check_and_choose( + control_data=transformed_results[1], + experimental_data=eap_errors, + callsite=errors_callsite, + is_experimental_data_a_null_result=len(eap_errors) == 0, + reasonable_match_comparator=lambda snuba, eap: {e["id"] for e in eap}.issubset( + {e["id"] for e in snuba} + ), + debug_context={ + "trace_id": trace_id, + "organization_id": ( + snuba_params.organization.id if snuba_params.organization else None + ), + "project_ids": snuba_params.project_ids, + }, + ) + # Join group IDs from the occurrence dataset to transactions data occurrence_issue_ids = defaultdict(list) occurrence_ids = defaultdict(list) diff --git a/tests/snuba/api/endpoints/test_organization_events_trace.py b/tests/snuba/api/endpoints/test_organization_events_trace.py index 74ff34633a78f3..92ecd4dd0b4c96 100644 --- a/tests/snuba/api/endpoints/test_organization_events_trace.py +++ b/tests/snuba/api/endpoints/test_organization_events_trace.py @@ -6,6 +6,7 @@ from django.urls import NoReverseMatch, reverse from sentry import options +from sentry.api.endpoints.organization_events_trace import query_trace_data from sentry.issues.grouptype import ( PerformanceFileIOMainThreadGroupType, PerformanceSlowDBQueryGroupType, @@ -13,6 +14,7 @@ from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.group import Group from sentry.search.eap.occurrences.rollout_utils import EAPOccurrencesComparator +from sentry.search.events.types import SnubaParams from sentry.testutils.cases import OccurrenceTestCase, TraceTestCase from sentry.utils.samples import load_data from tests.snuba.api.endpoints.test_organization_events import OrganizationEventsEndpointTestBase @@ -1520,6 +1522,84 @@ def test_load_performance_issues_with_eap_as_source_of_truth(self) -> None: assert perf_issue["project_id"] == self.project.id assert perf_issue["suspect_spans"] == [offender_span_id] + def test_query_trace_data_errors_with_eap_as_source_of_truth(self) 
-> None: + self.load_trace() + + group_1 = self.create_group(project=self.project) + group_2 = self.create_group(project=self.project) + error_event_id_1 = uuid4().hex + error_event_id_2 = uuid4().hex + self.store_eap_items( + [ + self.create_eap_occurrence( + project=self.project, + group_id=group_1.id, + trace_id=self.trace_id, + event_id=error_event_id_1, + timestamp=self.day_ago, + level="error", + title="First EAP error", + transaction="/api/first", + ), + self.create_eap_occurrence( + project=self.project, + group_id=group_2.id, + trace_id=self.trace_id, + event_id=error_event_id_2, + timestamp=self.day_ago, + level="warning", + title="Second EAP error", + transaction="/api/second", + ), + ] + ) + + snuba_params = SnubaParams( + start=self.day_ago - timedelta(hours=1), + end=self.day_ago + timedelta(hours=1), + organization=self.organization, + projects=[self.project, self.gen1_project, self.gen2_project], + ) + + with self.options( + { + EAPOccurrencesComparator._should_eval_option_name(): True, + EAPOccurrencesComparator._callsite_allowlist_option_name(): [ + "api.trace.query_trace_data.errors" + ], + } + ): + _transactions, errors = query_trace_data( + trace_id=self.trace_id, + snuba_params=snuba_params, + transaction_params=snuba_params, + limit=100, + event_id=None, + ) + + assert len(errors) == 2 + errors_by_id = {e["id"]: e for e in errors} + + error_1 = errors_by_id[error_event_id_1] + assert error_1["issue.id"] == group_1.id + assert error_1["project.id"] == self.project.id + assert error_1["project"] == self.project.slug + assert error_1["title"] == "First EAP error" + assert error_1["tags[level]"] == "error" + assert error_1["transaction"] == "/api/first" + assert error_1["timestamp"] is not None + assert error_1["timestamp_ms"] is not None + assert "trace.span" in error_1 + assert "message" in error_1 + + error_2 = errors_by_id[error_event_id_2] + assert error_2["issue.id"] == group_2.id + assert error_2["project.id"] == self.project.id + assert 
error_2["project"] == self.project.slug + assert error_2["title"] == "Second EAP error" + assert error_2["tags[level]"] == "warning" + assert error_2["transaction"] == "/api/second" + class OrganizationEventsTraceMetaEndpointTest( OrganizationEventsTraceEndpointBase, OccurrenceTestCase From 30448b1248e2c2949ac59cabcd61bc2b9d6f2329 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Mon, 23 Mar 2026 10:07:08 -0400 Subject: [PATCH 07/16] fix(tasks) Use SingletonProducer to better manage producer buffers (#111191) Wrap the KafkaProducer with a SingletonProducer. The SingletonProducer keeps producer buffers smaller (1000 vs 10000) and ensures that produce buffers are drained during shutdown. --- pyproject.toml | 2 +- src/sentry/options/defaults.py | 6 ++++++ src/sentry/taskworker/adapters.py | 13 +++++++++---- uv.lock | 8 ++++---- 4 files changed, 20 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4f6a46a0d7f296..ce788d1ada3ce7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ dependencies = [ "statsd>=3.3.0", "structlog>=22.1.0", "symbolic>=12.14.1", - "taskbroker-client>=0.1.6", + "taskbroker-client>=0.1.7", "tiktoken>=0.8.0", "tokenizers>=0.22.0", "tldextract>=5.1.2", diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 294cec3e1de0bf..162df8b8ff21d9 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -3858,6 +3858,12 @@ default=True, flags=FLAG_AUTOMATOR_MODIFIABLE, ) +register( + "taskworker.producer.max_futures", + type=Int, + default=1000, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) register( "taskworker.route.overrides", default={}, diff --git a/src/sentry/taskworker/adapters.py b/src/sentry/taskworker/adapters.py index 6d5234c71751b8..8aaba24ba42b30 100644 --- a/src/sentry/taskworker/adapters.py +++ b/src/sentry/taskworker/adapters.py @@ -24,7 +24,7 @@ from sentry.silo.base import SiloMode from sentry.utils import json from sentry.utils import metrics as 
sentry_metrics -from sentry.utils.arroyo_producer import get_arroyo_producer +from sentry.utils.arroyo_producer import SingletonProducer, get_arroyo_producer from sentry.utils.memory import track_memory_usage as sentry_track_memory_usage @@ -125,7 +125,7 @@ def route_namespace(self, name: str) -> str: _producer_local = threading.local() -def make_producer(topic: str) -> KafkaProducer: +def make_producer(topic: str) -> SingletonProducer: """ Producer factory for taskbroker-client. @@ -134,8 +134,13 @@ def make_producer(topic: str) -> KafkaProducer: """ if not hasattr(_producer_local, "producers"): _producer_local.producers = {} + if topic not in _producer_local.producers: - _producer_local.producers[topic] = get_arroyo_producer( - f"sentry.taskworker.{topic}", Topic(topic) + + def factory() -> KafkaProducer: + return get_arroyo_producer(f"sentry.taskworker.{topic}", Topic(topic)) + + _producer_local.producers[topic] = SingletonProducer( + factory, max_futures=options.get("taskworker.producer.max_futures") ) return _producer_local.producers[topic] diff --git a/uv.lock b/uv.lock index 935886ed12745e..33aea7127fda06 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.13" resolution-markers = [ "(python_full_version >= '3.14' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform == 'linux')", @@ -2362,7 +2362,7 @@ requires-dist = [ { name = "stripe", specifier = ">=6.7.0" }, { name = "structlog", specifier = ">=22.1.0" }, { name = "symbolic", specifier = ">=12.14.1" }, - { name = "taskbroker-client", specifier = ">=0.1.6" }, + { name = "taskbroker-client", specifier = ">=0.1.7" }, { name = "tiktoken", specifier = ">=0.8.0" }, { name = "tldextract", specifier = ">=5.1.2" }, { name = "tokenizers", specifier = ">=0.22.0" }, @@ -2735,7 +2735,7 @@ wheels = [ [[package]] name = "taskbroker-client" -version = "0.1.6" +version = "0.1.7" source = { registry = 
"https://pypi.devinfra.sentry.io/simple" } dependencies = [ { name = "confluent-kafka", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, @@ -2752,7 +2752,7 @@ dependencies = [ { name = "zstandard", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] wheels = [ - { url = "https://pypi.devinfra.sentry.io/wheels/taskbroker_client-0.1.6-py3-none-any.whl", hash = "sha256:9e828b0ec816faa2a05464c9f2e4fd32652d62809d8d553617b37de6d96a5ecd" }, + { url = "https://pypi.devinfra.sentry.io/wheels/taskbroker_client-0.1.7-py3-none-any.whl", hash = "sha256:3f15c3f4f63546331626e006934c92faefd59c3516d8d58676b3bbcbce8d4828" }, ] [[package]] From 80216b789651dcaafb1f61a0132129927fc73e7d Mon Sep 17 00:00:00 2001 From: Alex Sohn <44201357+alexsohn1126@users.noreply.github.com> Date: Mon, 23 Mar 2026 10:33:36 -0400 Subject: [PATCH 08/16] feat(seer): Add SeerExplorerResponse notification data and Slack renderer (#109317) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Adds `SeerExplorerResponse` dataclass and `SeerExplorerResponseTemplate` to the notification platform for Explorer completion data - Adds `SEER_EXPLORER_RESPONSE` to `NotificationSource` enum and source map - Adds `_render_explorer_response()` to `SeerSlackRenderer` — renders a section block (summary or default text) with a "View in Sentry" link button - Wires the new data type into the `SeerSlackRenderer.render()` dispatch ISWF-2025 --------- Co-authored-by: Leander Rodrigues Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- .../platform/slack/renderers/seer.py | 23 +++++++ .../notifications/platform/templates/seer.py | 47 +++++++++++++ src/sentry/notifications/platform/types.py | 4 ++ .../platform/slack/renderers/test_seer.py | 69 +++++++++++++++++++ .../platform/templates/test_seer.py | 29 ++++++++ 5 files changed, 172 insertions(+) diff --git a/src/sentry/notifications/platform/slack/renderers/seer.py 
b/src/sentry/notifications/platform/slack/renderers/seer.py index fd6c5f8d71f995..bba5db870d414e 100644 --- a/src/sentry/notifications/platform/slack/renderers/seer.py +++ b/src/sentry/notifications/platform/slack/renderers/seer.py @@ -11,6 +11,7 @@ ContextBlock, InteractiveElement, LinkButtonElement, + MarkdownBlock, MarkdownTextObject, PlainTextObject, RichTextBlock, @@ -26,6 +27,8 @@ SeerAutofixError, SeerAutofixTrigger, SeerAutofixUpdate, + SeerExplorerError, + SeerExplorerResponse, ) from sentry.notifications.platform.types import ( NotificationData, @@ -91,6 +94,10 @@ def render[DataT: NotificationData]( return cls._render_autofix_error(data) elif isinstance(data, SeerAutofixUpdate): return cls._render_autofix_update(data) + elif isinstance(data, SeerExplorerError): + return cls._render_explorer_error(data) + elif isinstance(data, SeerExplorerResponse): + return cls._render_explorer_response(data) else: raise ValueError(f"SeerSlackRenderer does not support {data.__class__.__name__}") @@ -187,6 +194,22 @@ def _render_autofix_update(cls, data: SeerAutofixUpdate) -> SlackRenderable: return SlackRenderable(blocks=blocks, text="Seer has emerged with news from its voyage") + @classmethod + def _render_explorer_error(cls, data: SeerExplorerError) -> SlackRenderable: + return SlackRenderable( + blocks=[ + SectionBlock(text=data.error_title), + SectionBlock(text=MarkdownTextObject(text=f">{data.error_message}")), + ], + text=f"Seer stumbled: {data.error_title}", + ) + + @classmethod + def _render_explorer_response(cls, data: SeerExplorerResponse) -> SlackRenderable: + blocks: list[Block] = [MarkdownBlock(text=data.summary)] + + return SlackRenderable(blocks=blocks, text="Seer Explorer has finished") + @classmethod def _render_link_button( cls, diff --git a/src/sentry/notifications/platform/templates/seer.py b/src/sentry/notifications/platform/templates/seer.py index e751c61238e10d..834011a7462a42 100644 --- a/src/sentry/notifications/platform/templates/seer.py +++ 
b/src/sentry/notifications/platform/templates/seer.py @@ -159,3 +159,50 @@ def from_update(update: SeerAutofixUpdate) -> SeerAutofixTrigger: organization_id=update.organization_id, stopping_point=stopping_point, ) + + +class SeerExplorerError(NotificationData): + error_message: str + error_title: str = "Seer had some trouble..." + source: NotificationSource = NotificationSource.SEER_EXPLORER_ERROR + + +@template_registry.register(NotificationSource.SEER_EXPLORER_ERROR) +class SeerExplorerErrorTemplate(NotificationTemplate[SeerExplorerError]): + category = NotificationCategory.SEER + example_data = SeerExplorerError( + error_title="Seer had some trouble...", + error_message="Seer could not explore your organization.", + ) + hide_from_debugger = True + + def render(self, data: SeerExplorerError) -> NotificationRenderedTemplate: + return NotificationRenderedTemplate( + subject=data.error_title, + body=[ParagraphBlock(blocks=[PlainTextBlock(text=data.error_message)])], + ) + + +class SeerExplorerResponse(NotificationData): + """Notification data for Explorer completion response in Slack.""" + + run_id: int + organization_id: int + explorer_link: str + summary: str + source: NotificationSource = NotificationSource.SEER_EXPLORER_RESPONSE + + +@template_registry.register(NotificationSource.SEER_EXPLORER_RESPONSE) +class SeerExplorerResponseTemplate(NotificationTemplate[SeerExplorerResponse]): + category = NotificationCategory.SEER + example_data = SeerExplorerResponse( + run_id=12345, + organization_id=1, + explorer_link="https://sentry.sentry.io/explore/seer/12345/", + summary="I've finished analyzing your question.", + ) + hide_from_debugger = True + + def render(self, data: SeerExplorerResponse) -> NotificationRenderedTemplate: + return NotificationRenderedTemplate(subject="Seer Explorer Response", body=[]) diff --git a/src/sentry/notifications/platform/types.py b/src/sentry/notifications/platform/types.py index 5fb79c6ec5dd8d..02bdd17e67ad08 100644 --- 
a/src/sentry/notifications/platform/types.py +++ b/src/sentry/notifications/platform/types.py @@ -57,6 +57,8 @@ class NotificationSource(StrEnum): SEER_AUTOFIX_TRIGGER = "seer-autofix-trigger" SEER_AUTOFIX_FOOTER = "seer-autofix-footer" SEER_AUTOFIX_SUCCESS = "seer-autofix-success" + SEER_EXPLORER_RESPONSE = "seer-explorer-response" + SEER_EXPLORER_ERROR = "seer-explorer-error" NOTIFICATION_SOURCE_MAP: dict[NotificationCategory, list[NotificationSource]] = { @@ -83,6 +85,8 @@ class NotificationSource(StrEnum): NotificationSource.SEER_AUTOFIX_ERROR, NotificationSource.SEER_AUTOFIX_SUCCESS, NotificationSource.SEER_AUTOFIX_UPDATE, + NotificationSource.SEER_EXPLORER_RESPONSE, + NotificationSource.SEER_EXPLORER_ERROR, ], } diff --git a/tests/sentry/notifications/platform/slack/renderers/test_seer.py b/tests/sentry/notifications/platform/slack/renderers/test_seer.py index e6ebd07fd1a6d8..b41792d5241180 100644 --- a/tests/sentry/notifications/platform/slack/renderers/test_seer.py +++ b/tests/sentry/notifications/platform/slack/renderers/test_seer.py @@ -6,6 +6,7 @@ ButtonElement, ContextBlock, LinkButtonElement, + MarkdownBlock, PlainTextObject, SectionBlock, ) @@ -16,6 +17,8 @@ SeerAutofixCodeChange, SeerAutofixPullRequest, SeerAutofixUpdate, + SeerExplorerError, + SeerExplorerResponse, ) from sentry.seer.autofix.utils import AutofixStoppingPoint from sentry.testutils.cases import TestCase @@ -182,3 +185,69 @@ def test_render_autofix_update_solution_no_next_trigger_when_coding_disabled( # Should only have link button, no next trigger button assert len(actions_block.elements) == 1 assert isinstance(actions_block.elements[0], LinkButtonElement) + + +class SeerSlackRendererExplorerErrorTest(TestCase): + def test_render_explorer_error(self) -> None: + data = SeerExplorerError(error_message="Seer could not explore your organization.") + renderable = SeerSlackRenderer._render_explorer_error(data) + + assert renderable["text"] == "Seer stumbled: Seer had some trouble..." 
+ blocks = renderable["blocks"] + assert len(blocks) == 2 + assert isinstance(blocks[0], SectionBlock) + assert blocks[0].text is not None + assert blocks[0].text.text == "Seer had some trouble..." + assert isinstance(blocks[1], SectionBlock) + assert blocks[1].text is not None + assert ">Seer could not explore your organization." in blocks[1].text.text + + def test_render_explorer_error_custom_title(self) -> None: + data = SeerExplorerError( + error_message="Timeout.", + error_title="Explorer failed", + ) + renderable = SeerSlackRenderer._render_explorer_error(data) + + assert renderable["text"] == "Seer stumbled: Explorer failed" + title_block = renderable["blocks"][0] + assert isinstance(title_block, SectionBlock) + assert title_block.text is not None + assert title_block.text.text == "Explorer failed" + body_block = renderable["blocks"][1] + assert isinstance(body_block, SectionBlock) + assert body_block.text is not None + assert ">Timeout." in body_block.text.text + + +class SeerSlackRendererExplorerTest(TestCase): + def _create_explorer_response(self, summary: str = "") -> SeerExplorerResponse: + return SeerExplorerResponse( + run_id=MOCK_RUN_ID, + organization_id=self.organization.id, + explorer_link=f"https://sentry.sentry.io/explore/seer/{MOCK_RUN_ID}/", + summary=summary, + ) + + def test_render_explorer_response_with_summary(self) -> None: + data = self._create_explorer_response( + summary="Found a spike in 500 errors from the auth service." + ) + renderable = SeerSlackRenderer._render_explorer_response(data) + + assert renderable["text"] == "Seer Explorer has finished" + blocks = renderable["blocks"] + assert len(blocks) == 1 + + assert isinstance(blocks[0], MarkdownBlock) + assert "Found a spike in 500 errors from the auth service." 
in blocks[0].text + + def test_render_dispatches_to_explorer_response(self) -> None: + data = self._create_explorer_response(summary="Test") + from sentry.notifications.platform.types import NotificationRenderedTemplate + + renderable = SeerSlackRenderer.render( + data=data, + rendered_template=NotificationRenderedTemplate(subject="", body=[]), + ) + assert renderable["text"] == "Seer Explorer has finished" diff --git a/tests/sentry/notifications/platform/templates/test_seer.py b/tests/sentry/notifications/platform/templates/test_seer.py index 22090ff231893e..95a2a1b10a56d1 100644 --- a/tests/sentry/notifications/platform/templates/test_seer.py +++ b/tests/sentry/notifications/platform/templates/test_seer.py @@ -7,8 +7,10 @@ from sentry.notifications.platform.templates.seer import ( SeerAutofixTrigger, SeerAutofixUpdate, + SeerExplorerError, _get_next_stopping_point, ) +from sentry.notifications.platform.types import ParagraphBlock from sentry.seer.autofix.utils import AutofixStoppingPoint from sentry.testutils.cases import TestCase @@ -102,3 +104,30 @@ def test_has_next_trigger_coding_default(self, mock_get_option: Mock) -> None: coding_update = self._create_update(AutofixStoppingPoint.CODE_CHANGES) assert solution_update.has_next_trigger is ENABLE_SEER_CODING_DEFAULT assert coding_update.has_next_trigger is ENABLE_SEER_CODING_DEFAULT + + +class SeerExplorerErrorTemplateTest(TestCase): + def test_render(self) -> None: + from sentry.notifications.platform.templates.seer import SeerExplorerErrorTemplate + + data = SeerExplorerError(error_message="Seer could not explore your organization.") + template = SeerExplorerErrorTemplate() + rendered = template.render(data) + + assert rendered.subject == "Seer had some trouble..." + assert len(rendered.body) == 1 + assert isinstance(rendered.body[0], ParagraphBlock) + assert rendered.body[0].blocks[0].text == "Seer could not explore your organization." 
+ + def test_render_custom_title(self) -> None: + from sentry.notifications.platform.templates.seer import SeerExplorerErrorTemplate + + data = SeerExplorerError( + error_message="Timeout.", + error_title="Explorer failed", + ) + template = SeerExplorerErrorTemplate() + rendered = template.render(data) + + assert rendered.subject == "Explorer failed" + assert rendered.body[0].blocks[0].text == "Timeout." From a2862a753c0d5db97bef52821f4b251185443523 Mon Sep 17 00:00:00 2001 From: geoffg-sentry <165922362+geoffg-sentry@users.noreply.github.com> Date: Mon, 23 Mar 2026 10:46:35 -0400 Subject: [PATCH 09/16] Scope get_latest_issue_event ID query to the organization (#111252) Defense in depth and save us from loading data in memory we don't need. The group_id param is (indirectly) user controlled since seer pulls those issue IDs from PR descriptions. _group_by_short_id scoped the organization, but the ID path didn't have an org filter. We have a post-query check so it's already not a problem in the response, but there's no reason to query for this at all when we can pre-filter. Prevents loading that data in memory and makes both paths consistent.
--- src/sentry/seer/fetch_issues/utils.py | 4 +++- tests/sentry/seer/fetch_issues/test_utils.py | 12 ++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/sentry/seer/fetch_issues/utils.py b/src/sentry/seer/fetch_issues/utils.py index d36f9376bab981..5411e3839442da 100644 --- a/src/sentry/seer/fetch_issues/utils.py +++ b/src/sentry/seer/fetch_issues/utils.py @@ -152,7 +152,9 @@ def get_latest_issue_event(group_id: int | str, organization_id: int) -> dict[st if isinstance(group_id, str) and not group_id.isdigit(): group = _group_by_short_id(group_id, organization_id) else: - group = Group.objects.filter(id=int(group_id)).first() + group = Group.objects.filter( + id=int(group_id), project__organization_id=organization_id + ).first() if not group: logger.warning( diff --git a/tests/sentry/seer/fetch_issues/test_utils.py b/tests/sentry/seer/fetch_issues/test_utils.py index 47e1d0d915c70e..54fbf3ec503705 100644 --- a/tests/sentry/seer/fetch_issues/test_utils.py +++ b/tests/sentry/seer/fetch_issues/test_utils.py @@ -289,6 +289,18 @@ def test_get_latest_issue_event_wrong_organization(self): results = get_latest_issue_event(group.id, self.organization.id + 1) assert results == {} + def test_get_latest_issue_event_numeric_id_cross_org(self): + """Numeric group ID from another org must not be returned.""" + other_org = self.create_organization(owner=self.create_user()) + other_project = self.create_project(organization=other_org) + data = load_data("python", timestamp=before_now(minutes=1)) + other_event = self.store_event(data=data, project_id=other_project.id) + other_group = other_event.group + assert other_group is not None + + result = get_latest_issue_event(other_group.id, self.organization.id) + assert result == {} + class TestHandleFetchIssuesExceptions(TestCase): def test_handle_fetch_issues_exceptions_success(self): From b2c4940f6390e0576737c8d550ba5a0a695baab0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vjeran=20Grozdani=C4=87?= Date: Mon, 23 
Mar 2026 15:48:32 +0100 Subject: [PATCH 10/16] ref(db): Stop using legacy create_or_update in API endpoints (#111288) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Replace `create_or_update` with Django's `update_or_create` in 7 API endpoint files - Remove migrated files from the `create_or_update` allowlist - Parameter mapping: `values={}` → `defaults={}`, filter kwargs unchanged Migrated files: - `organization_pinned_searches.py` — return value ignored - `prompts_activity.py` — return value ignored - `organization_unsubscribe.py` — return value ignored - `organization_recent_searches.py` — `[1]` index → `_, created` tuple unpack - `group_index/update.py` — 2 call sites: manager method + standalone function call - `organization_group_search_view_visit.py` — return value ignored Part of the ongoing effort to deprecate and remove the legacy `create_or_update` in favor of Django's `update_or_create`. --- .../api/endpoints/organization_pinned_searches.py | 4 ++-- .../api/endpoints/organization_recent_searches.py | 6 +++--- src/sentry/api/endpoints/organization_unsubscribe.py | 4 ++-- src/sentry/api/endpoints/prompts_activity.py | 4 ++-- src/sentry/api/helpers/group_index/update.py | 10 ++++------ .../endpoints/organization_group_search_view_visit.py | 4 ++-- tests/sentry/test_no_create_or_update_usage.py | 6 ------ 7 files changed, 15 insertions(+), 23 deletions(-) diff --git a/src/sentry/api/endpoints/organization_pinned_searches.py b/src/sentry/api/endpoints/organization_pinned_searches.py index b2645f1ee6ae49..0f41c5ac7b1724 100644 --- a/src/sentry/api/endpoints/organization_pinned_searches.py +++ b/src/sentry/api/endpoints/organization_pinned_searches.py @@ -50,13 +50,13 @@ def put(self, request: Request, organization: Organization) -> Response: return Response(serializer.errors, status=400) result = serializer.validated_data - SavedSearch.objects.create_or_update( + SavedSearch.objects.update_or_create( 
organization=organization, name=PINNED_SEARCH_NAME, owner_id=request.user.id, type=result["type"], visibility=Visibility.OWNER_PINNED, - values={"query": result["query"], "sort": result["sort"]}, + defaults={"query": result["query"], "sort": result["sort"]}, ) # This entire endpoint will be removed once custom views are GA'd diff --git a/src/sentry/api/endpoints/organization_recent_searches.py b/src/sentry/api/endpoints/organization_recent_searches.py index 9a52c383b73538..4b6c1398ccfc63 100644 --- a/src/sentry/api/endpoints/organization_recent_searches.py +++ b/src/sentry/api/endpoints/organization_recent_searches.py @@ -73,13 +73,13 @@ def post(self, request: Request, organization) -> Response: if serializer.is_valid(): result = serializer.validated_data - created = RecentSearch.objects.create_or_update( + _, created = RecentSearch.objects.update_or_create( organization_id=organization.id, user_id=request.user.id, type=result["type"], query=result["query"], - values={"last_seen": timezone.now()}, - )[1] + defaults={"last_seen": timezone.now()}, + ) if created: remove_excess_recent_searches(organization, request.user, result["type"]) status = 201 if created else 204 diff --git a/src/sentry/api/endpoints/organization_unsubscribe.py b/src/sentry/api/endpoints/organization_unsubscribe.py index f279a3fc71f20b..17e3031c6082af 100644 --- a/src/sentry/api/endpoints/organization_unsubscribe.py +++ b/src/sentry/api/endpoints/organization_unsubscribe.py @@ -143,9 +143,9 @@ def fetch_instance( return issue def unsubscribe(self, request: Request, instance: Group): - GroupSubscription.objects.create_or_update( + GroupSubscription.objects.update_or_create( group=instance, project_id=instance.project_id, user_id=request.user.pk, - values={"is_active": False}, + defaults={"is_active": False}, ) diff --git a/src/sentry/api/endpoints/prompts_activity.py b/src/sentry/api/endpoints/prompts_activity.py index 669d04b002bdcf..5488af642c64c2 100644 --- 
a/src/sentry/api/endpoints/prompts_activity.py +++ b/src/sentry/api/endpoints/prompts_activity.py @@ -132,8 +132,8 @@ def put(self, request: Request, organization: Organization, **kwargs) -> Respons try: with transaction.atomic(router.db_for_write(PromptsActivity)): - PromptsActivity.objects.create_or_update( - feature=feature, user_id=request.user.id, values={"data": data}, **fields + PromptsActivity.objects.update_or_create( + feature=feature, user_id=request.user.id, defaults={"data": data}, **fields ) except IntegrityError: pass diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index f3b2f38296fad3..09cc3bbb8b38d2 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -22,7 +22,6 @@ from sentry.analytics.events.manual_issue_assignment import ManualIssueAssignment from sentry.api.serializers import serialize from sentry.api.serializers.models.actor import ActorSerializer, ActorSerializerResponse -from sentry.db.models.query import create_or_update from sentry.hybridcloud.rpc import coerce_id_from from sentry.integrations.tasks.kick_off_status_syncs import kick_off_status_syncs from sentry.issues.grouptype import GroupCategory @@ -928,11 +927,11 @@ def handle_is_subscribed( # subscribed" to "you were subscribed since you were # assigned" just by clicking the "subscribe" button (and you # may no longer be assigned to the issue anyway). 
- GroupSubscription.objects.create_or_update( + GroupSubscription.objects.update_or_create( user_id=acting_user.id if acting_user else None, group=group, project=project_lookup[group.project_id], - values={"is_active": is_subscribed, "reason": GroupSubscriptionReason.unknown}, + defaults={"is_active": is_subscribed, "reason": GroupSubscriptionReason.unknown}, ) return {"reason": SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown, "unknown")} @@ -990,12 +989,11 @@ def handle_has_seen( if has_seen: for group in group_list: if is_member_map.get(group.project_id): - create_or_update( - GroupSeen, + GroupSeen.objects.update_or_create( group=group, user_id=user_id, project=project_lookup[group.project_id], - values={"last_seen": django_timezone.now()}, + defaults={"last_seen": django_timezone.now()}, ) elif has_seen is False and user_id is not None: GroupSeen.objects.filter( diff --git a/src/sentry/issues/endpoints/organization_group_search_view_visit.py b/src/sentry/issues/endpoints/organization_group_search_view_visit.py index 8278f6a6de87ce..bd713fc2a0ab5d 100644 --- a/src/sentry/issues/endpoints/organization_group_search_view_visit.py +++ b/src/sentry/issues/endpoints/organization_group_search_view_visit.py @@ -36,11 +36,11 @@ def post(self, request: Request, organization: Organization, view_id: str) -> Re return Response(status=status.HTTP_404_NOT_FOUND) # Create or update the last_visited timestamp - GroupSearchViewLastVisited.objects.create_or_update( + GroupSearchViewLastVisited.objects.update_or_create( organization=organization, user_id=request.user.id, group_search_view=view, - values={"last_visited": timezone.now()}, + defaults={"last_visited": timezone.now()}, ) return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/tests/sentry/test_no_create_or_update_usage.py b/tests/sentry/test_no_create_or_update_usage.py index 9cae04c25f9d2e..52234a003a3f6b 100644 --- a/tests/sentry/test_no_create_or_update_usage.py +++ 
b/tests/sentry/test_no_create_or_update_usage.py @@ -15,13 +15,7 @@ "src/sentry/notifications/services/impl.py", "src/sentry/nodestore/django/backend.py", "src/sentry/issues/ignored.py", - "src/sentry/issues/endpoints/organization_group_search_view_visit.py", - "src/sentry/api/helpers/group_index/update.py", - "src/sentry/api/endpoints/organization_pinned_searches.py", "src/sentry/releases/endpoints/release_deploys.py", - "src/sentry/api/endpoints/organization_unsubscribe.py", - "src/sentry/api/endpoints/organization_recent_searches.py", - "src/sentry/api/endpoints/prompts_activity.py", "src/sentry/dashboards/endpoints/organization_dashboard_details.py", "src/sentry/onboarding_tasks/backends/organization_onboarding_task.py", "src/sentry/explore/endpoints/explore_saved_query_detail.py", From e1f2981d139db2757ba41692009727fd2c1c9a24 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Mon, 23 Mar 2026 10:51:23 -0400 Subject: [PATCH 11/16] fix(dashboards): Set breakdown legend widget limit to 3 in prebuilt configs (#111291) Ensure all prebuilt dashboard widgets with `legendType: 'breakdown'` have a limit of 3, so broken out legends show a maximum of 3 items. 
This is just for consistency Fixes DAIN-1380 Co-authored-by: Claude Opus 4.6 --- .../views/dashboards/utils/prebuiltConfigs/ai/aiAgentsModels.ts | 1 + .../utils/prebuiltConfigs/backendOverview/backendOverview.ts | 2 +- .../utils/prebuiltConfigs/nextJsOverview/nextJsOverview.ts | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/static/app/views/dashboards/utils/prebuiltConfigs/ai/aiAgentsModels.ts b/static/app/views/dashboards/utils/prebuiltConfigs/ai/aiAgentsModels.ts index 19fc8befedce15..fb15e5bdf2be21 100644 --- a/static/app/views/dashboards/utils/prebuiltConfigs/ai/aiAgentsModels.ts +++ b/static/app/views/dashboards/utils/prebuiltConfigs/ai/aiAgentsModels.ts @@ -90,6 +90,7 @@ const FIRST_ROW_WIDGETS = spaceWidgetsEquallyOnRow( orderby: '', }, ], + limit: 3, }, ], 0, diff --git a/static/app/views/dashboards/utils/prebuiltConfigs/backendOverview/backendOverview.ts b/static/app/views/dashboards/utils/prebuiltConfigs/backendOverview/backendOverview.ts index dffbc423cc9eb7..6d4ad3d84b22a6 100644 --- a/static/app/views/dashboards/utils/prebuiltConfigs/backendOverview/backendOverview.ts +++ b/static/app/views/dashboards/utils/prebuiltConfigs/backendOverview/backendOverview.ts @@ -189,7 +189,7 @@ export const BACKEND_OVERVIEW_SECOND_ROW_WIDGETS = spaceWidgetsEquallyOnRow( orderby: `-equation|count_if(${SpanFields.CACHE_HIT},equals,false) / count(${SpanFields.SPAN_DURATION})`, }, ], - limit: 4, + limit: 3, widgetType: WidgetType.SPANS, }, ], diff --git a/static/app/views/dashboards/utils/prebuiltConfigs/nextJsOverview/nextJsOverview.ts b/static/app/views/dashboards/utils/prebuiltConfigs/nextJsOverview/nextJsOverview.ts index 64d036070eb387..c9c3d3852f2352 100644 --- a/static/app/views/dashboards/utils/prebuiltConfigs/nextJsOverview/nextJsOverview.ts +++ b/static/app/views/dashboards/utils/prebuiltConfigs/nextJsOverview/nextJsOverview.ts @@ -110,7 +110,7 @@ const SECOND_ROW_WIDGETS = spaceWidgetsEquallyOnRow( widgetType: WidgetType.SPANS, legendType: 
'breakdown', interval: '5m', - limit: 4, + limit: 3, queries: [ { name: '', From 512adf966d9f0683c045bbe975926d1f42fe8107 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 23 Mar 2026 11:17:12 -0400 Subject: [PATCH 12/16] chore(autofix): Only require autofix-on-explorer for automations (#111293) Only require `autofix-on-explorer` to start automations as not all orgs will have `seer-explorer`. --- src/sentry/seer/autofix/issue_summary.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/sentry/seer/autofix/issue_summary.py b/src/sentry/seer/autofix/issue_summary.py index 93528b56df8eac..daca2a5c89e3d1 100644 --- a/src/sentry/seer/autofix/issue_summary.py +++ b/src/sentry/seer/autofix/issue_summary.py @@ -216,9 +216,7 @@ def _trigger_autofix_task( # Route to explorer-based autofix if both feature flags are enabled run_id: int | None = None - if features.has("organizations:seer-explorer", group.organization) and features.has( - "organizations:autofix-on-explorer", group.organization - ): + if features.has("organizations:autofix-on-explorer", group.organization): run_id = trigger_autofix_explorer( group=group, step=AutofixStep.ROOT_CAUSE, From 74a116c28dda7aa5ace72a452cacdd6a13e1487c Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Mon, 23 Mar 2026 11:20:07 -0400 Subject: [PATCH 13/16] ref(dashboards): Extract breakdown table matching and support multi-groupBy (#111245) Extract the inline logic that matches time series data to table rows in the breakdown legend into a dedicated `matchTimeSeriesToTableRow` function with tests. Tested with following dashboards image The new implementation uses `groupBy.every()` to match all groupBy keys, enabling legend breakdown for widgets with multiple group-by columns. 
It also handles type coercion between numeric table values and string groupBy values (e.g., `http.response_status_code: 200` in the table matching `"200"` from the backend) using Python's `str()` conversion rules. Changes: - New `matchTimeSeriestoTableRow.tsx` with the extracted matching logic and `toPythonString` helper - Tests covering: no groupBy, single groupBy, multiple groupBy, numeric-to-string coercion, no match - Removed the widget builder restriction that disabled legend breakdown for >1 group-by column - Removed unused `aggregates` variable from `visualizationWidget.tsx` Refs DAIN-1387 --------- Co-authored-by: Claude Opus 4.6 Co-authored-by: Claude Opus 4.6 --- static/app/utils/string/toPythonString.tsx | 23 ++++ .../components/legendTypeSelector.tsx | 44 ++----- .../matchTimeSeriesToTableRowValue.spec.tsx | 108 ++++++++++++++++++ .../matchTimeSeriesToTableRowValue.tsx | 32 ++++++ .../widgetCard/visualizationWidget.tsx | 30 ++--- 5 files changed, 182 insertions(+), 55 deletions(-) create mode 100644 static/app/utils/string/toPythonString.tsx create mode 100644 static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.spec.tsx create mode 100644 static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.tsx diff --git a/static/app/utils/string/toPythonString.tsx b/static/app/utils/string/toPythonString.tsx new file mode 100644 index 00000000000000..b36e9f5fe33c12 --- /dev/null +++ b/static/app/utils/string/toPythonString.tsx @@ -0,0 +1,23 @@ +/** + * Converts a JS value to a string matching Python's str() output. + * The backend converts group-by values using str(), so we need to + * match that behavior for comparisons. + */ +export function toPythonString(value: unknown): string { + if (typeof value === 'boolean') { + return value ? 
'True' : 'False'; + } + if (Array.isArray(value)) { + const items = value.map(item => { + if (item === null || item === undefined) { + return 'None'; + } + if (typeof item === 'string') { + return `'${item}'`; + } + return String(item); + }); + return `[${items.join(', ')}]`; + } + return String(value); +} diff --git a/static/app/views/dashboards/widgetBuilder/components/legendTypeSelector.tsx b/static/app/views/dashboards/widgetBuilder/components/legendTypeSelector.tsx index 133c6b8cc9a1a6..7cf2d455d3711d 100644 --- a/static/app/views/dashboards/widgetBuilder/components/legendTypeSelector.tsx +++ b/static/app/views/dashboards/widgetBuilder/components/legendTypeSelector.tsx @@ -1,46 +1,26 @@ import {Checkbox} from '@sentry/scraps/checkbox'; import {Flex} from '@sentry/scraps/layout'; import {Text} from '@sentry/scraps/text'; -import {Tooltip} from '@sentry/scraps/tooltip'; import {t} from 'sentry/locale'; import {useWidgetBuilderContext} from 'sentry/views/dashboards/widgetBuilder/contexts/widgetBuilderContext'; import {BuilderStateAction} from 'sentry/views/dashboards/widgetBuilder/hooks/useWidgetBuilderState'; -import {FieldValueKind} from 'sentry/views/discover/table/types'; export function LegendTypeSelector() { const {state, dispatch} = useWidgetBuilderContext(); - const columns = state.fields?.filter(field => field.kind === FieldValueKind.FIELD); - // Currently, the legned breakdown is only supported with one or fewer group-by columns - // The logic to extract the right table data from the response is not yet implemented - const disabled = (columns?.length ?? 0) > 1; - return ( - - - { - dispatch({ - type: BuilderStateAction.SET_LEGEND_TYPE, - payload: e.target.checked ? 'breakdown' : undefined, - }); - }} - /> - {t('Show legend breakdown')} - - + + { + dispatch({ + type: BuilderStateAction.SET_LEGEND_TYPE, + payload: e.target.checked ? 
'breakdown' : undefined, + }); + }} + /> + {t('Show legend breakdown')} + ); } diff --git a/static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.spec.tsx b/static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.spec.tsx new file mode 100644 index 00000000000000..a1940aa4ff8712 --- /dev/null +++ b/static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.spec.tsx @@ -0,0 +1,108 @@ +import {TimeSeriesFixture} from 'sentry-fixture/timeSeries'; + +import {matchTimeSeriesToTableRowValue} from './matchTimeSeriesToTableRowValue'; + +describe('matchTimeSeriesToTableRowValue', () => { + it('returns the first row value when there is no groupBy', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [{id: '1', 'count()': 42}], + timeSeries: TimeSeriesFixture({yAxis: 'count()', groupBy: undefined}), + }); + + expect(result).toBe(42); + }); + + it('returns null when there are no rows and no groupBy', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [], + timeSeries: TimeSeriesFixture({yAxis: 'count()', groupBy: undefined}), + }); + + expect(result).toBeNull(); + }); + + it('matches a row by a single groupBy value', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [ + {id: '1', 'browser.name': 'Chrome', 'count()': 10}, + {id: '2', 'browser.name': 'Firefox', 'count()': 5}, + ], + timeSeries: TimeSeriesFixture({ + yAxis: 'count()', + groupBy: [{key: 'browser.name', value: 'Firefox'}], + }), + }); + + expect(result).toBe(5); + }); + + it('matches a row by multiple groupBy values', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [ + {id: '1', 'browser.name': 'Chrome', 'os.name': 'Windows', 'count()': 10}, + {id: '2', 'browser.name': 'Chrome', 'os.name': 'Mac', 'count()': 7}, + {id: '3', 'browser.name': 'Firefox', 'os.name': 'Windows', 'count()': 5}, + ], + timeSeries: TimeSeriesFixture({ + yAxis: 'count()', + groupBy: [ + {key: 
'browser.name', value: 'Chrome'}, + {key: 'os.name', value: 'Mac'}, + ], + }), + }); + + expect(result).toBe(7); + }); + + it('returns null when no row matches the groupBy', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [{id: '1', 'browser.name': 'Chrome', 'count()': 10}], + timeSeries: TimeSeriesFixture({ + yAxis: 'count()', + groupBy: [{key: 'browser.name', value: 'Safari'}], + }), + }); + + expect(result).toBeNull(); + }); + + it('matches numeric table values to string groupBy values', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [ + {id: '1', 'http.response_status_code': 200, 'count()': 50}, + {id: '2', 'http.response_status_code': 404, 'count()': 3}, + ], + timeSeries: TimeSeriesFixture({ + yAxis: 'count()', + groupBy: [{key: 'http.response_status_code', value: '200'}], + }), + }); + + expect(result).toBe(50); + }); + + it('matches array groupBy values using Python str() format', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [ + {id: '1', tags: "['a', 'b']", 'count()': 10}, + {id: '2', tags: "['c', None]", 'count()': 20}, + ], + timeSeries: TimeSeriesFixture({ + yAxis: 'count()', + groupBy: [{key: 'tags', value: ['c', null]}], + }), + }); + + expect(result).toBe(20); + }); + + it('returns the first row value when groupBy is an empty array', () => { + const result = matchTimeSeriesToTableRowValue({ + tableDataRows: [{id: '1', 'count()': 42}], + timeSeries: TimeSeriesFixture({yAxis: 'count()', groupBy: []}), + }); + + expect(result).toBe(42); + }); +}); diff --git a/static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.tsx b/static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.tsx new file mode 100644 index 00000000000000..faf3bb7cc0af92 --- /dev/null +++ b/static/app/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue.tsx @@ -0,0 +1,32 @@ +import type {TableDataRow} from 'sentry/utils/discover/discoverQuery'; +import {toPythonString} 
from 'sentry/utils/string/toPythonString'; +import type {TimeSeries} from 'sentry/views/dashboards/widgets/common/types'; + +/** + * Finds the value of a given yAxis in table data that matches the + * time series groupBy values. Used by the breakdown legend to display + * aggregated values alongside each series. + */ +export function matchTimeSeriesToTableRowValue({ + tableDataRows, + timeSeries, +}: { + tableDataRows: TableDataRow[]; + timeSeries: Pick; +}): number | null { + const {groupBy, yAxis} = timeSeries; + + if (!groupBy || groupBy.length === 0) { + // No groupBy means a single aggregated row + // Table results will be `[{aggregate1: 123}, {aggregate2: 345}]` + const row = tableDataRows[0]; + return (row?.[yAxis] as number) ?? null; + } + + // Finding a row that has the same group-by values as the time series + const matchedRow = tableDataRows.find(row => + groupBy.every(group => toPythonString(row[group.key]) === toPythonString(group.value)) + ); + + return (matchedRow?.[yAxis] as number) ?? 
null; +} diff --git a/static/app/views/dashboards/widgetCard/visualizationWidget.tsx b/static/app/views/dashboards/widgetCard/visualizationWidget.tsx index 9c58ce8c72d61f..1b956bf1afa749 100644 --- a/static/app/views/dashboards/widgetCard/visualizationWidget.tsx +++ b/static/app/views/dashboards/widgetCard/visualizationWidget.tsx @@ -29,6 +29,7 @@ import { getLinkedDashboardUrl, } from 'sentry/views/dashboards/utils/getLinkedDashboardUrl'; import {getChartType} from 'sentry/views/dashboards/utils/getWidgetExploreUrl'; +import {matchTimeSeriesToTableRowValue} from 'sentry/views/dashboards/widgetCard/matchTimeSeriesToTableRowValue'; import {transformWidgetSeriesToTimeSeries} from 'sentry/views/dashboards/widgetCard/transformWidgetSeriesToTimeSeries'; import {MISSING_DATA_MESSAGE} from 'sentry/views/dashboards/widgets/common/settings'; import type { @@ -209,7 +210,6 @@ function VisualizationWidgetContent({ const {selection} = usePageFilters(); const firstWidgetQuery = widget.queries[0]; - const aggregates = firstWidgetQuery?.aggregates ?? []; // All widget queries have the same aggregates const columns = firstWidgetQuery?.columns ?? []; // All widget queries have the same columns const timeSeriesWithPlottable = timeseriesResults @@ -279,33 +279,17 @@ function VisualizationWidgetContent({ return null; } - let value: number | null = null; const yAxis = timeSeries.yAxis; const firstColumnGroupByValue = timeSeries.groupBy?.find( groupBy => groupBy.key === firstColumn )?.value; - if (tableDataRows) { - // If there is one column, the table results will be an array with multiple elements - // [{column: 'value', aggregate: 123}, {column: 'value', aggregate: 123}] - if (columns.length === 1) { - if (firstColumnGroupByValue !== undefined && firstColumn) { - // for 20 series, this is only 20 x 20 lookups, which is negligible and worth it for code readability - value = - (tableDataRows.find( - row => row[firstColumn] === firstColumnGroupByValue - )?.[yAxis] as number) ?? 
null; - } - } - // If there is no columns, and only aggregates, the table result will be an array with a single element - // [{aggregate1: 123}, {aggregate2: 345}] - else if (columns.length === 0 && aggregates.length > 0) { - const row = tableDataRows[0]; - if (row) { - value = (row[yAxis] as number) ?? null; - } - } - } + const value = tableDataRows + ? matchTimeSeriesToTableRowValue({ + tableDataRows, + timeSeries, + }) + : null; const dataType = timeSeries.meta.valueType; const dataUnit = timeSeries.meta.valueUnit ?? undefined; const label = plottable?.label ?? timeSeries.yAxis; From c7e09832e4731296f0c1f4aa932c20182a1c8565 Mon Sep 17 00:00:00 2001 From: Ogi Date: Mon, 23 Mar 2026 16:28:12 +0100 Subject: [PATCH 14/16] fix(ai-insights): Use AIContentRenderer for conversation table tooltips (#111285) closes [TET-2129: Agree on where to use the new input output render](https://linear.app/getsentry/issue/TET-2129/agree-on-where-to-use-the-new-input-output-render) --- .../pages/conversations/components/conversationsTable.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/views/insights/pages/conversations/components/conversationsTable.tsx b/static/app/views/insights/pages/conversations/components/conversationsTable.tsx index 491dcd918498c1..c0e33a78cd5f68 100644 --- a/static/app/views/insights/pages/conversations/components/conversationsTable.tsx +++ b/static/app/views/insights/pages/conversations/components/conversationsTable.tsx @@ -33,6 +33,7 @@ import { type Conversation, type ConversationUser, } from 'sentry/views/insights/pages/conversations/hooks/useConversations'; +import {AIContentRenderer} from 'sentry/views/performance/newTraceDetails/traceDrawer/details/span/eapSections/aiContentRenderer'; interface ConversationsTableProps { openConversationViewDrawer: ReturnType< @@ -141,7 +142,7 @@ const CELL_MAX_CHARS = 256; function TooltipContent({text}: {text: string}) { return ( - + ); } From 92b79937e517c7053e269582cbc3db2a3a8e23df 
Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Mon, 23 Mar 2026 11:36:13 -0400 Subject: [PATCH 15/16] fix(autofix): Remove broken docs link from GitHub Copilot CTA (#111298) The "Read the docs" link in the GitHub Copilot integration CTA points to `docs.sentry.io/organization/integrations/github-copilot/` which returns a 404. Removes the link from both the installed and not-installed states. The rest of the CTA copy and functionality is unchanged. The link can be re-added once the docs page exists. Agent transcript: https://claudescope.sentry.dev/share/vG5lStfoMBbmZcJ3VliZion_bLqDYH39Gfq96F-HFBA --- .../autofix/githubCopilotIntegrationCta.tsx | 21 +++++-------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/static/app/components/events/autofix/githubCopilotIntegrationCta.tsx b/static/app/components/events/autofix/githubCopilotIntegrationCta.tsx index 060251cad397e7..38f2b214021fe7 100644 --- a/static/app/components/events/autofix/githubCopilotIntegrationCta.tsx +++ b/static/app/components/events/autofix/githubCopilotIntegrationCta.tsx @@ -2,12 +2,11 @@ import {useCallback} from 'react'; import {LinkButton} from '@sentry/scraps/button'; import {Container, Flex} from '@sentry/scraps/layout'; -import {ExternalLink} from '@sentry/scraps/link'; import {Heading, Text} from '@sentry/scraps/text'; import {organizationIntegrationsCodingAgents} from 'sentry/components/events/autofix/useAutofix'; import {Placeholder} from 'sentry/components/placeholder'; -import {t, tct} from 'sentry/locale'; +import {t} from 'sentry/locale'; import {PluginIcon} from 'sentry/plugins/components/pluginIcon'; import {trackAnalytics} from 'sentry/utils/analytics'; import {useQuery} from 'sentry/utils/queryClient'; @@ -75,13 +74,8 @@ export function GithubCopilotIntegrationCta() { - {tct( - 'Connect GitHub Copilot to hand off Seer root cause analysis to GitHub Copilot coding agent for seamless code fixes. 
[docsLink:Read the docs] to learn more.', - { - docsLink: ( - - ), - } + {t( + 'Connect GitHub Copilot to hand off Seer root cause analysis to GitHub Copilot coding agent for seamless code fixes.' )}
@@ -114,13 +108,8 @@ export function GithubCopilotIntegrationCta() { - {tct( - 'GitHub Copilot integration is installed. You can trigger GitHub Copilot from Issue Fix to create pull requests. [docsLink:Read the docs] to learn more.', - { - docsLink: ( - - ), - } + {t( + 'GitHub Copilot integration is installed. You can trigger GitHub Copilot from Issue Fix to create pull requests.' )} From a1867452018af283741ff1ce2aeccf676762bb42 Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Mon, 23 Mar 2026 11:45:34 -0400 Subject: [PATCH 16/16] feat(dashboards): Improvements to Dashboard generation tracking (#111199) - tags metric with org slug - captures error on failed validation - adds a couple new refs to guard against duplicate metrics on the same generation cycle, or metrics on refresh --- .../app/views/dashboards/createFromSeer.tsx | 40 ++++++++++++++----- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/static/app/views/dashboards/createFromSeer.tsx b/static/app/views/dashboards/createFromSeer.tsx index 72fe1061fc9b35..ae75a832a2d8f2 100644 --- a/static/app/views/dashboards/createFromSeer.tsx +++ b/static/app/views/dashboards/createFromSeer.tsx @@ -112,11 +112,22 @@ async function validateDashboardAndRecordMetrics( try { await validateDashboard(organization.slug, newDashboard); Sentry.metrics.count('dashboards.seer.validation', 1, { - attributes: {status: 'success', ...(seerRunId ? {seer_run_id: seerRunId} : {})}, + attributes: { + status: 'success', + organization_slug: organization.slug, + ...(seerRunId ? {seer_run_id: seerRunId} : {}), + }, }); } catch (error) { Sentry.metrics.count('dashboards.seer.validation', 1, { - attributes: {status: 'failure', ...(seerRunId ? {seer_run_id: seerRunId} : {})}, + attributes: { + status: 'failure', + organization_slug: organization.slug, + ...(seerRunId ? 
{seer_run_id: seerRunId} : {}), + }, + }); + Sentry.captureException(error, { + tags: {seer_run_id: seerRunId}, }); } } @@ -148,6 +159,10 @@ export default function CreateFromSeer() { // validation errors. const completedAtRef = useRef(null); + // Additional guards to prevent duplicate metrics recording and on reload + const hasValidatedRef = useRef(false); + const hasSeenNonTerminalRef = useRef(false); + const {data, isError} = useApiQuery( makeSeerExplorerQueryKey(organization.slug, seerRunId), { @@ -155,9 +170,6 @@ export default function CreateFromSeer() { retry: false, enabled: !!seerRunId && hasFeature, refetchInterval: query => { - if (isUpdating) { - return POLL_INTERVAL_MS; - } const status = query.state.data?.[0]?.session?.status; if (statusIsTerminal(status)) { if (completedAtRef.current === null) { @@ -166,11 +178,17 @@ export default function CreateFromSeer() { if (Date.now() - completedAtRef.current < POST_COMPLETE_POLL_MS) { return POLL_INTERVAL_MS; } - validateDashboardAndRecordMetrics(organization, dashboard, seerRunId); + if (!hasValidatedRef.current && hasSeenNonTerminalRef.current) { + hasValidatedRef.current = true; + validateDashboardAndRecordMetrics(organization, dashboard, seerRunId); + } return false; } - // Status left "completed" (hook triggered a re-run), reset - completedAtRef.current = null; + if (status !== undefined && !statusIsTerminal(status)) { + hasSeenNonTerminalRef.current = true; + hasValidatedRef.current = false; + completedAtRef.current = null; + } return POLL_INTERVAL_MS; }, } @@ -186,7 +204,10 @@ export default function CreateFromSeer() { } const prevUpdatedAt = prevSessionStatusRef.current.updated_at; const prevStatus = prevSessionStatusRef.current.status; - prevSessionStatusRef.current = {status: sessionStatus, updated_at: sessionUpdatedAt}; + prevSessionStatusRef.current = { + status: sessionStatus, + updated_at: sessionUpdatedAt, + }; const isTerminal = statusIsTerminal(sessionStatus); const wasTerminal = 
statusIsTerminal(prevStatus); @@ -254,6 +275,7 @@ export default function CreateFromSeer() { } setisUpdating(true); completedAtRef.current = null; + hasValidatedRef.current = false; try { const queryKey = makeSeerExplorerQueryKey(organization.slug, seerRunId); const {url} = parseQueryKey(queryKey);