diff --git a/backend/analytics_server/mhq/api/request_utils.py b/backend/analytics_server/mhq/api/request_utils.py index 0ffc819a3..f7fddf8ca 100644 --- a/backend/analytics_server/mhq/api/request_utils.py +++ b/backend/analytics_server/mhq/api/request_utils.py @@ -7,6 +7,7 @@ from stringcase import snakecase from voluptuous import Invalid from werkzeug.exceptions import BadRequest +from mhq.utils.log import LOG from mhq.store.models.code.repository import TeamRepos from mhq.service.code.models.org_repo import RawTeamOrgRepo from mhq.store.models.code import WorkflowFilter, CodeProvider @@ -82,20 +83,24 @@ def coerce_workflow_filter(filter_data: str) -> WorkflowFilter: def coerce_org_repo(repo: Dict[str, str]) -> RawTeamOrgRepo: - return RawTeamOrgRepo( - team_id=repo.get("team_id"), - provider=CodeProvider(repo.get("provider")), - name=repo.get("name"), - org_name=repo.get("org"), - slug=repo.get("slug"), - idempotency_key=repo.get("idempotency_key"), - default_branch=repo.get("default_branch"), - deployment_type=( - TeamReposDeploymentType(repo.get("deployment_type")) - if repo.get("deployment_type") - else TeamReposDeploymentType.PR_MERGE - ), - ) + try: + return RawTeamOrgRepo( + team_id=repo.get("team_id"), + provider=CodeProvider(repo.get("provider")), + name=repo.get("name"), + org_name=repo.get("org"), + slug=repo.get("slug"), + idempotency_key=repo.get("idempotency_key"), + default_branch=repo.get("default_branch"), + deployment_type=( + TeamReposDeploymentType(repo.get("deployment_type")) + if repo.get("deployment_type") + else TeamReposDeploymentType.PR_MERGE + ), + ) + except Exception as e: + LOG.error(f"Error creating RawTeamOrgRepo with data: {repo}. 
Error: {str(e)}") + raise def coerce_org_repos(repos: List[Dict[str, str]]) -> List[RawTeamOrgRepo]: diff --git a/backend/analytics_server/mhq/exapi/bitbucket.py b/backend/analytics_server/mhq/exapi/bitbucket.py new file mode 100644 index 000000000..e53025fda --- /dev/null +++ b/backend/analytics_server/mhq/exapi/bitbucket.py @@ -0,0 +1,126 @@ +import requests +from typing import Optional, Dict, Any + +from mhq.utils.log import LOG +from mhq.exapi.models.bitbucket import BitbucketRepo + +class BitbucketApiService: + def __init__(self, access_token: str): + self._token = access_token + self.base_url = "https://api.bitbucket.org/2.0" + self.headers = {"Authorization": f"Basic {self._token}"} + self.session = requests.Session() + self.session.headers.update(self.headers) + + def check_pat(self) -> bool: + """ + Checks if Personal Access Token is valid. + + Returns: + bool: True if PAT is valid, False otherwise + + Raises: + requests.RequestException: If the request fails + """ + url = f"{self.base_url}/user" + try: + response = self.session.get(url, timeout=30) + return response.status_code == 200 + except requests.RequestException as e: + LOG.error(f"PAT validation failed: {e}") + raise requests.RequestException(f"PAT validation failed: {e}") + + def _handle_error(self, response: requests.Response) -> None: + """ + Handle HTTP error responses from Bitbucket API. 
+ + Args: + response: The HTTP response object + + Raises: + requests.HTTPError: If response status code is not 200 + """ + if response.status_code != 200: + try: + error_data = response.json() + error = error_data.get("error", "Unknown error") + message = error_data.get("message", "No message provided") + except ValueError: + error = "Invalid response format" + message = response.text or "No error details available" + + error_msg = f"Request failed with status {response.status_code}: {error} - {message}" + LOG.error(error_msg) + raise requests.HTTPError(error_msg) + + def get_workspace_repos(self, workspace: str, repo_slug: str) -> BitbucketRepo: + """ + Get repository information for a specific workspace and repository. + + Args: + workspace: The workspace name + repo_slug: The repository slug + + Returns: + BitbucketRepo: Repository information object + + Raises: + requests.HTTPError: If the request fails + requests.RequestException: If the request encounters an error + """ + url = f"{self.base_url}/repositories/{workspace}/{repo_slug}" + try: + response = self.session.get(url, timeout=30) + self._handle_error(response) + repo = response.json() + return BitbucketRepo(repo) + except requests.RequestException as e: + LOG.error(f"Failed to get repository {workspace}/{repo_slug}: {e}") + raise + + def get_repo_contributors(self, workspace: str, repo_slug: str) -> Dict[str,int]: + """ + Get all contributors for a repository with their contribution counts. 
+ + Args: + workspace: The workspace name + repo_slug: The repository slug + + Returns: + dict: Dictionary with contributor names as keys and contribution counts as values + + Raises: + requests.HTTPError: If the request fails + requests.RequestException: If the request encounters an error + """ + url = f"{self.base_url}/repositories/{workspace}/{repo_slug}/commits" + contributors = {} + + try: + while url: + response = self.session.get(url, timeout=30) + self._handle_error(response) + + data = response.json() + commits = data.get('values', []) + + for commit in commits: + author = commit.get('author', {}) + user = author.get('user', {}) + display_name = user.get('display_name', 'Unknown') + + if display_name in contributors: + contributors[display_name] += 1 + else: + contributors[display_name] = 1 + + url = data.get('next') + + return contributors + + except requests.RequestException as e: + LOG.error(f"Failed to get contributors for {workspace}/{repo_slug}: {e}") + raise + + + diff --git a/backend/analytics_server/mhq/exapi/models/bitbucket.py b/backend/analytics_server/mhq/exapi/models/bitbucket.py new file mode 100644 index 000000000..47e8726f6 --- /dev/null +++ b/backend/analytics_server/mhq/exapi/models/bitbucket.py @@ -0,0 +1,129 @@ +from dataclasses import dataclass +from datetime import datetime +from enum import Enum +from typing import Dict, List, Optional + +from mhq.utils.time import dt_from_iso_time_string + + +@dataclass +class BitbucketRepo: + name: str + org_name: str + default_branch: str + idempotency_key: str + slug: str + description: str + web_url: str + languages: Optional[Dict] = None + contributors: Optional[List] = None + + def __init__(self, repo: Dict): + self.name = repo.get("name", "") + workspace = repo.get("workspace", {}) + self.org_name = workspace.get("slug", workspace.get("name", "")) + self.default_branch = repo.get("mainbranch", {}).get("name", "main") + self.idempotency_key = str(repo.get("uuid", "")) + self.slug = 
repo.get("slug", "") + self.description = repo.get("description", "") + self.web_url = repo.get("links", {}).get("html", {}).get("href", "") + self.languages = repo.get("language") + + def __hash__(self): + return hash(self.idempotency_key) + + +class BitbucketPRState(Enum): + OPEN = "OPEN" + MERGED = "MERGED" + SUPERSEDED = "SUPERSEDED" + DECLINED = "DECLINED" + + +@dataclass +class BitbucketPR: + number: int + title: str + url: str + author: str + reviewers: List[str] + state: BitbucketPRState + base_branch: str + head_branch: str + data: Dict + created_at: datetime + updated_at: datetime + merged_at: Optional[datetime] = None + closed_at: Optional[datetime] = None + merge_commit_sha: Optional[str] = None + + def __init__(self, pr: Dict): + self.number = pr.get("id", 0) + self.title = pr.get("title", "") + self.url = pr.get("links", {}).get("html", {}).get("href", "") + self.author = pr.get("author", {}).get("display_name", "") + self.reviewers = [ + reviewer.get("display_name", "") + for reviewer in pr.get("reviewers", []) + ] + state_str = pr.get("state", "OPEN").upper() + try: + self.state = BitbucketPRState(state_str) + except ValueError: + + self.state = BitbucketPRState.OPEN + self.base_branch = pr.get("destination", {}).get("branch", {}).get("name", "") + self.head_branch = pr.get("source", {}).get("branch", {}).get("name", "") + self.data = pr + self.created_at = dt_from_iso_time_string(pr.get("created_on", "")) or datetime.now() + self.updated_at = dt_from_iso_time_string(pr.get("updated_on", "")) or datetime.now() + + # Parse merge/close dates + if pr.get("merge_commit"): + self.merged_at = self.updated_at + self.merge_commit_sha = pr.get("merge_commit", {}).get("hash", "") + + if self.state in [BitbucketPRState.DECLINED, BitbucketPRState.SUPERSEDED]: + self.closed_at = self.updated_at + + +@dataclass +class BitbucketCommit: + hash: str + message: str + url: str + data: Dict + author_email: str + created_at: datetime + + def __init__(self, commit: 
Dict): + self.hash = commit.get("hash", "") + self.message = commit.get("message", "") + self.url = commit.get("links", {}).get("html", {}).get("href", "") + self.data = commit + self.author_email = commit.get("author", {}).get("raw", "").split("<")[-1].replace(">", "").strip() + self.created_at = dt_from_iso_time_string(commit.get("date", "")) or datetime.now() + + +class BitbucketReviewState(Enum): + APPROVED = "approved" + CHANGES_REQUESTED = "changes_requested" + COMMENTED = "commented" + + +@dataclass +class BitbucketReview: + id: str + state: BitbucketReviewState + created_at: datetime + actor_username: str + data: Dict + idempotency_key: str + + def __init__(self, review: Dict): + self.id = str(review.get("uuid", "")) + self.state = BitbucketReviewState(review.get("state", "commented")) + self.created_at = dt_from_iso_time_string(review.get("date", "")) or datetime.now() + self.actor_username = review.get("user", {}).get("display_name", "") + self.data = review + self.idempotency_key = self.id \ No newline at end of file diff --git a/backend/analytics_server/mhq/service/code/integration.py b/backend/analytics_server/mhq/service/code/integration.py index e79a1792c..b4085b659 100644 --- a/backend/analytics_server/mhq/service/code/integration.py +++ b/backend/analytics_server/mhq/service/code/integration.py @@ -6,6 +6,7 @@ CODE_INTEGRATION_BUCKET = [ UserIdentityProvider.GITHUB.value, UserIdentityProvider.GITLAB.value, + UserIdentityProvider.BITBUCKET.value ] diff --git a/backend/analytics_server/mhq/service/code/sync/etl_bitbucket_handler.py b/backend/analytics_server/mhq/service/code/sync/etl_bitbucket_handler.py new file mode 100644 index 000000000..5ad88a6f6 --- /dev/null +++ b/backend/analytics_server/mhq/service/code/sync/etl_bitbucket_handler.py @@ -0,0 +1,490 @@ +import uuid +from datetime import datetime +from typing import List, Dict, Optional, Tuple, Set + +import pytz + +from mhq.exapi.models.bitbucket import BitbucketRepo, BitbucketPR, 
BitbucketCommit, BitbucketReview, BitbucketPRState +from mhq.exapi.bitbucket import BitbucketApiService +from mhq.service.code.sync.etl_code_analytics import CodeETLAnalyticsService +from mhq.service.code.sync.etl_provider_handler import CodeProviderETLHandler +from mhq.service.code.sync.revert_pr_bitbucket_sync import ( + RevertPRsBitbucketSyncHandler, + get_revert_prs_bitbucket_sync_handler, +) +from mhq.store.models import UserIdentityProvider +from mhq.store.models.code import ( + OrgRepo, + PullRequestState, + PullRequest, + PullRequestCommit, + PullRequestEvent, + PullRequestEventType, + PullRequestEventState, + PullRequestRevertPRMapping, + CodeProvider, +) +from mhq.store.repos.code import CodeRepoService +from mhq.store.repos.core import CoreRepoService +from mhq.utils.log import LOG +from mhq.utils.time import time_now, ISO_8601_DATE_FORMAT + +PR_PROCESSING_CHUNK_SIZE = 100 + + +class BitbucketETLHandler(CodeProviderETLHandler): + """Handler for Bitbucket ETL operations.""" + + def __init__( + self, + org_id: str, + bitbucket_api_service: BitbucketApiService, + code_repo_service: CodeRepoService, + code_etl_analytics_service: CodeETLAnalyticsService, + bitbucket_revert_pr_sync_handler: RevertPRsBitbucketSyncHandler, + ): + self.org_id = org_id + self._api = bitbucket_api_service + self.code_repo_service = code_repo_service + self.code_etl_analytics_service : CodeETLAnalyticsService = ( + code_etl_analytics_service + ) + self.provider = CodeProvider.BITBUCKET.value + self.bitbucket_revert_pr_sync_handler: RevertPRsBitbucketSyncHandler = ( + bitbucket_revert_pr_sync_handler + ) + + def check_pat_validity(self) -> bool: + """Check if the Bitbucket Personal Access Token is valid. + + Returns: + bool: True if the PAT is valid. + + Raises: + Exception: If the Bitbucket credentials are invalid. + """ + is_valid = self._api.check_pat() + if not is_valid: + raise Exception("Bitbucket credentials are invalid. 
Please check username or password.") + return is_valid + + def get_org_repos(self, org_repos: List[OrgRepo]) -> List[OrgRepo]: + """Get organization repositories from Bitbucket API. + + Args: + org_repos: List of organization repositories to fetch. + + Returns: + List of processed OrgRepo objects. + """ + bitbucket_repos: List[BitbucketRepo] = [] + for org_repo in org_repos: + workspace = org_repo.org_name + repo_slug = org_repo.name + try: + bitbucket_repo = self._api.get_workspace_repos(workspace, repo_slug) + bitbucket_repos.append(bitbucket_repo) + except Exception as e: + LOG.error(f"Error getting Bitbucket repository {workspace}/{repo_slug}: {e}") + continue + repo_idempotency_key_org_repo_map = { + org_repo.idempotency_key: org_repo for org_repo in org_repos + } + + processed_repos = [] + for bitbucket_repo in bitbucket_repos: + org_repo = repo_idempotency_key_org_repo_map.get(str(bitbucket_repo.idempotency_key)) + if org_repo is not None: + processed_repo = self._process_bitbucket_repo(org_repo, bitbucket_repo) + if processed_repo is not None: + processed_repos.append(processed_repo) + + return processed_repos + + def _process_bitbucket_repo( + self, org_repo: OrgRepo, bitbucket_repo: BitbucketRepo + ) -> OrgRepo: + """Process a Bitbucket repository into an OrgRepo object. + + Args: + org_repo: Original organization repository. + bitbucket_repo: Bitbucket repository data. + + Returns: + Processed OrgRepo object. 
+ """ + processed_repo = OrgRepo() + processed_repo.id = org_repo.id + processed_repo.org_id = self.org_id + processed_repo.name = bitbucket_repo.name + processed_repo.provider = self.provider + processed_repo.org_name = bitbucket_repo.org_name + processed_repo.default_branch = bitbucket_repo.default_branch + processed_repo.language = bitbucket_repo.languages + processed_repo.contributors = self._api.get_repo_contributors( + bitbucket_repo.org_name, bitbucket_repo.name + ) + processed_repo.idempotency_key = str(bitbucket_repo.idempotency_key) + processed_repo.slug = bitbucket_repo.name + processed_repo.updated_at = time_now() + return processed_repo + + def get_repo_pull_requests_data( + self, org_repo: OrgRepo, bookmark: datetime + ) -> Tuple[List[PullRequest], List[PullRequestCommit], List[PullRequestEvent]]: + """Get all pull requests, their commits and events for a repository. + + Args: + org_repo: OrgRepo object to get pull requests for + bookmark: Bookmark date to get all pull requests after this date + + Returns: + Tuple of pull requests, their commits and events + """ + workspace = org_repo.org_name + repo_slug = org_repo.name + + try: + bitbucket_prs: List[BitbucketPR] = self._api.get_pull_requests( + workspace, repo_slug, state="all" + ) + except Exception as e: + LOG.error(f"Error getting pull requests for {workspace}/{repo_slug}: {e}") + return [], [], [] + + prs_to_process = [] + for pr in bitbucket_prs: + if pr.updated_at.replace(tzinfo=pytz.UTC) <= bookmark: + continue + + state_changed_at = pr.merged_at if pr.merged_at else pr.closed_at + if ( + pr.state != BitbucketPRState.OPEN.value + and state_changed_at + and state_changed_at.replace(tzinfo=pytz.UTC) < bookmark + ): + continue + + prs_to_process.append(pr) + + if not prs_to_process: + LOG.info("Nothing to process 🎉") + return [], [], [] + + pull_requests: List[PullRequest] = [] + pr_commits: List[PullRequestCommit] = [] + pr_events: List[PullRequestEvent] = [] + prs_added: Set[int] = set() + + 
for bitbucket_pr in prs_to_process: + if bitbucket_pr.number in prs_added: + continue + + pr_model, event_models, pr_commit_models = self.process_pr( + str(org_repo.id), bitbucket_pr, workspace, repo_slug + ) + pull_requests.append(pr_model) + pr_events += event_models + pr_commits += pr_commit_models + prs_added.add(bitbucket_pr.number) + + return pull_requests, pr_commits, pr_events + + def process_pr( + self, repo_id: str, pr: BitbucketPR, workspace: str, repo_slug: str + ) -> Tuple[PullRequest, List[PullRequestEvent], List[PullRequestCommit]]: + """Process a single pull request and return its model with events and commits. + + Args: + repo_id: Repository ID + pr: BitbucketPR object + workspace: Bitbucket workspace name + repo_slug: Repository slug + + Returns: + Tuple of PR model, events, and commits + """ + existing_pr_model: Optional[PullRequest] = self.code_repo_service.get_repo_pr_by_number( + repo_id, pr.number + ) + pr_event_model_list: List[PullRequestEvent] = ( + self.code_repo_service.get_pr_events(existing_pr_model) if existing_pr_model else [] + ) + + try: + reviews: List[BitbucketReview] = self._api.get_pr_reviews( + workspace, repo_slug, pr.number + ) + diff_stats = self._api.get_pr_diff_stats(workspace, repo_slug, pr.number) + except Exception as e: + LOG.error(f"Error getting PR details for {workspace}/{repo_slug}/pull/{pr.number}: {e}") + reviews = [] + diff_stats = {"additions": 0, "deletions": 0, "changed_files": 0} + + pr_model: PullRequest = self._to_pr_model( + pr, existing_pr_model, repo_id, len(reviews), diff_stats + ) + pr_events_model_list: List[PullRequestEvent] = self._to_pr_events( + reviews, pr_model, pr_event_model_list + ) + + pr_commits_model_list: List[PullRequestCommit] = [] + # Get commits for all PRs, not just merged ones, to calculate proper analytics + try: + commits: List[BitbucketCommit] = self._api.get_pr_commits( + workspace, repo_slug, pr.number + ) + pr_commits_model_list = self._to_pr_commits(commits, pr_model) + + # 
Update the commit count in PR meta + if pr_model.meta and "code_stats" in pr_model.meta: + pr_model.meta["code_stats"]["commits"] = len(pr_commits_model_list) + + except Exception as e: + LOG.error(f"Error getting commits for PR {pr.number}: {e}") + # Set commit count to 0 if we can't get commits + if pr_model.meta and "code_stats" in pr_model.meta: + pr_model.meta["code_stats"]["commits"] = 0 + + pr_model = self.code_etl_analytics_service.create_pr_metrics( + pr_model, pr_events_model_list, pr_commits_model_list + ) + + return pr_model, pr_events_model_list, pr_commits_model_list + + def get_revert_prs_mapping( + self, prs: List[PullRequest] + ) -> List[PullRequestRevertPRMapping]: + """Get revert PR mappings for the given PRs. + + Args: + prs: List of PullRequest objects + + Returns: + List of PullRequestRevertPRMapping objects + """ + return self.bitbucket_revert_pr_sync_handler(prs) + + def _to_pr_model( + self, + pr: BitbucketPR, + pr_model: Optional[PullRequest], + repo_id: str, + review_comments: int = 0, + diff_stats: Optional[Dict[str, int]] = None, + ) -> PullRequest: + """Convert BitbucketPR to PullRequest model. 
+ + Args: + pr: BitbucketPR object + pr_model: Existing PullRequest model if any + repo_id: Repository ID + review_comments: Number of review comments + diff_stats: Diff statistics dict + + Returns: + PullRequest model + """ + if diff_stats is None: + diff_stats = {"additions": 0, "deletions": 0, "changed_files": 0} + + state = self._get_state(pr) + pr_id = pr_model.id if pr_model else uuid.uuid4() + state_changed_at = None + + if state != PullRequestState.OPEN: + state_changed_at = ( + pr.merged_at.replace(tzinfo=pytz.UTC) + if pr.merged_at + else pr.closed_at.replace(tzinfo=pytz.UTC) if pr.closed_at + else None + ) + + merge_commit_sha: Optional[str] = self._get_merge_commit_sha(pr.data, state) + + pr_model = PullRequest() + pr_model.id = pr_id + pr_model.number = str(pr.number) + pr_model.title = pr.title + pr_model.url = pr.url + pr_model.created_at = pr.created_at.replace(tzinfo=pytz.UTC) + pr_model.updated_at = pr.updated_at.replace(tzinfo=pytz.UTC) + pr_model.state_changed_at = state_changed_at + pr_model.state = state + pr_model.base_branch = pr.base_branch + pr_model.head_branch = pr.head_branch + pr_model.author = pr.author + pr_model.repo_id = repo_id + pr_model.data = pr.data + pr_model.requested_reviews = pr.reviewers + pr_model.meta = dict( + code_stats=dict( + commits=0, # This will be updated when we get actual commit count + additions=diff_stats.get("additions", 0), + deletions=diff_stats.get("deletions", 0), + changed_files=diff_stats.get("changed_files", 0), + comments=review_comments, + ), + user_profile=dict(username=pr.author), + ) + pr_model.provider = UserIdentityProvider.BITBUCKET.value + pr_model.merge_commit_sha = merge_commit_sha + + return pr_model + + @staticmethod + def _get_merge_commit_sha(raw_data: Dict, state: PullRequestState) -> Optional[str]: + """Extract merge commit SHA from raw data. 
+ + Args: + raw_data: Raw PR data from Bitbucket + state: PR state + + Returns: + Merge commit SHA if available + """ + if state != PullRequestState.MERGED: + return None + + merge_commit = raw_data.get("merge_commit") + if merge_commit: + return merge_commit.get("hash") + + return None + + @staticmethod + def _get_state(pr: BitbucketPR) -> PullRequestState: + """Convert Bitbucket PR state to internal PR state. + + Args: + pr: BitbucketPR object + + Returns: + PullRequestState enum value + """ + if pr.state == BitbucketPRState.MERGED: + return PullRequestState.MERGED + elif pr.state in [BitbucketPRState.DECLINED, BitbucketPRState.SUPERSEDED]: + return PullRequestState.CLOSED + else: + return PullRequestState.OPEN + + @staticmethod + def _map_bitbucket_review_state_to_pr_event_state(review_state: str) -> str: + """Map Bitbucket review state to internal PullRequestEventState. + + Args: + review_state: Bitbucket review state + + Returns: + Internal PullRequestEventState value + """ + from mhq.exapi.models.bitbucket import BitbucketReviewState + + if review_state == BitbucketReviewState.APPROVED.value: + return PullRequestEventState.APPROVED.value + elif review_state == BitbucketReviewState.CHANGES_REQUESTED.value: + return PullRequestEventState.CHANGES_REQUESTED.value + else: + return PullRequestEventState.COMMENTED.value + + @staticmethod + def _to_pr_events( + reviews: List[BitbucketReview], + pr_model: PullRequest, + pr_events_model: List[PullRequestEvent], + ) -> List[PullRequestEvent]: + """Convert Bitbucket reviews to PullRequestEvent models. 
+ + Args: + reviews: List of BitbucketReview objects + pr_model: PullRequest model + pr_events_model: Existing PR events + + Returns: + List of PullRequestEvent models + """ + pr_events: List[PullRequestEvent] = [] + pr_event_id_map = {event.idempotency_key: event.id for event in pr_events_model} + + for review in reviews: + if not review.created_at: + continue + + review_data = review.data.copy() + review_data["state"] = BitbucketETLHandler._map_bitbucket_review_state_to_pr_event_state( + review.state.value + ) + + pr_event = PullRequestEvent() + pr_event.id = pr_event_id_map.get(review.idempotency_key, uuid.uuid4()) + pr_event.pull_request_id = str(pr_model.id) + pr_event.type = PullRequestEventType.REVIEW.value + pr_event.data = review_data + pr_event.created_at = review.created_at.replace(tzinfo=pytz.UTC) + pr_event.idempotency_key = review.idempotency_key + pr_event.org_repo_id = pr_model.repo_id + pr_event.actor_username = review.actor_username + pr_events.append(pr_event) + return pr_events + + def _to_pr_commits( + self, + commits: List[BitbucketCommit], + pr_model: PullRequest, + ) -> List[PullRequestCommit]: + """Convert Bitbucket commits to PullRequestCommit models. 
+ + Args: + commits: List of BitbucketCommit objects + pr_model: PullRequest model + + Returns: + List of PullRequestCommit models + """ + pr_commits: List[PullRequestCommit] = [] + + for commit in commits: + pr_commit = PullRequestCommit() + pr_commit.hash = commit.hash + pr_commit.pull_request_id = str(pr_model.id) + pr_commit.url = commit.url + pr_commit.data = commit.data + pr_commit.message = commit.message + pr_commit.author = commit.author_email + pr_commit.created_at = commit.created_at.replace(tzinfo=pytz.UTC) + pr_commit.org_repo_id = pr_model.repo_id + pr_commits.append(pr_commit) + return pr_commits + + +def _get_access_token(org_id: str) -> Optional[str]: + """Retrieve access token for the given organization.""" + core_repo_service = CoreRepoService() + access_token = core_repo_service.get_access_token( + org_id, UserIdentityProvider.BITBUCKET + ) + + if not access_token: + LOG.error( + f"Access token not found for org {org_id} and provider " + f"{UserIdentityProvider.BITBUCKET.value}" + ) + + return access_token + + +def get_bitbucket_etl_handler(org_id: str) -> BitbucketETLHandler: + """Factory function to create a BitbucketETLHandler instance.""" + access_token = _get_access_token(org_id) + + if not access_token: + raise Exception(f"Access token not found for org {org_id} and provider {UserIdentityProvider.BITBUCKET.value}") + + return BitbucketETLHandler( + org_id=org_id, + bitbucket_api_service=BitbucketApiService(access_token), + code_repo_service=CodeRepoService(), + code_etl_analytics_service=CodeETLAnalyticsService(), + bitbucket_revert_pr_sync_handler=get_revert_prs_bitbucket_sync_handler(), + ) \ No newline at end of file diff --git a/backend/analytics_server/mhq/service/code/sync/etl_code_factory.py b/backend/analytics_server/mhq/service/code/sync/etl_code_factory.py index 327bb92c8..60c876b1c 100644 --- a/backend/analytics_server/mhq/service/code/sync/etl_code_factory.py +++ 
b/backend/analytics_server/mhq/service/code/sync/etl_code_factory.py @@ -1,5 +1,7 @@ +from mhq.utils.log import LOG from mhq.service.code.sync.etl_gitlab_handler import get_gitlab_etl_handler from mhq.service.code.sync.etl_github_handler import get_github_etl_handler +from mhq.service.code.sync.etl_bitbucket_handler import get_bitbucket_etl_handler from mhq.service.code.sync.etl_provider_handler import CodeProviderETLHandler from mhq.store.models.code import CodeProvider @@ -15,4 +17,7 @@ def __call__(self, provider: str) -> CodeProviderETLHandler: if provider == CodeProvider.GITLAB.value: return get_gitlab_etl_handler(self.org_id) + if provider == CodeProvider.BITBUCKET.value: + return get_bitbucket_etl_handler(self.org_id) + raise NotImplementedError(f"Unknown provider - {provider}") diff --git a/backend/analytics_server/mhq/service/code/sync/revert_pr_bitbucket_sync.py b/backend/analytics_server/mhq/service/code/sync/revert_pr_bitbucket_sync.py new file mode 100644 index 000000000..2bd65041f --- /dev/null +++ b/backend/analytics_server/mhq/service/code/sync/revert_pr_bitbucket_sync.py @@ -0,0 +1,191 @@ +import re +from datetime import datetime +from typing import List, Set, Dict, Optional + +from mhq.store.models.code import ( + PullRequest, + PullRequestRevertPRMapping, + PullRequestRevertPRMappingActorType, +) +from mhq.store.repos.code import CodeRepoService +from mhq.utils.time import time_now + + +class RevertPRsBitbucketSyncHandler: + def __init__( + self, + code_repo_service: CodeRepoService, + ): + self.code_repo_service = code_repo_service + + def __call__(self, *args, **kwargs): + return self.process_revert_prs(*args, **kwargs) + + def process_revert_prs( + self, prs: List[PullRequest] + ) -> List[PullRequestRevertPRMapping]: + revert_prs: List[PullRequest] = [] + original_prs: List[PullRequest] = [] + + for pr in prs: + pr_number = ( + self._get_revert_pr_number(pr.head_branch) if pr.head_branch else None + ) + if pr_number is None: + 
original_prs.append(pr) + else: + revert_prs.append(pr) + + mappings_of_revert_prs = self._get_revert_pr_mapping_for_revert_prs(revert_prs) + mappings_of_original_prs = self._get_revert_pr_mapping_for_original_prs( + original_prs + ) + revert_pr_mappings = set(mappings_of_original_prs + mappings_of_revert_prs) + + return list(revert_pr_mappings) + + def _get_revert_pr_mapping_for_original_prs( + self, prs: List[PullRequest] + ) -> List[PullRequestRevertPRMapping]: + """ + This function takes a list of PRs and for each PR it tries to + find if that pr has been reverted and by which PR. It is done + by taking repo_id and the pr_number and searching for the + string 'revert-[pr-number]' in the head branch. + """ + + repo_ids: Set[str] = set() + repo_id_to_pr_number_to_id_map: Dict[str, Dict[str, str]] = {} + pr_numbers_match_strings: List[str] = [] + + for pr in prs: + pr_numbers_match_strings.append(f"revert-{pr.number}") + repo_ids.add(str(pr.repo_id)) + + if str(pr.repo_id) not in repo_id_to_pr_number_to_id_map: + repo_id_to_pr_number_to_id_map[str(pr.repo_id)] = {} + + repo_id_to_pr_number_to_id_map[str(pr.repo_id)][str(pr.number)] = pr.id + + if len(pr_numbers_match_strings) == 0: + return [] + + revert_prs: List[PullRequest] = ( + self.code_repo_service.get_prs_by_head_branch_match_strings( + list(repo_ids), pr_numbers_match_strings + ) + ) + + revert_pr_mappings: List[PullRequestRevertPRMapping] = [] + + for rev_pr in revert_prs: + original_pr_number = self._get_revert_pr_number(rev_pr.head_branch) + if original_pr_number is None: + continue + + repo_key_exists = repo_id_to_pr_number_to_id_map.get(str(rev_pr.repo_id)) + if repo_key_exists is None: + continue + + original_pr_id = repo_id_to_pr_number_to_id_map[str(rev_pr.repo_id)].get( + str(original_pr_number) + ) + if original_pr_id is None: + continue + + revert_pr_mp = PullRequestRevertPRMapping() + revert_pr_mp.pr_id = rev_pr.id + revert_pr_mp.actor_type = PullRequestRevertPRMappingActorType.SYSTEM + 
revert_pr_mp.actor = None + revert_pr_mp.reverted_pr = original_pr_id + revert_pr_mp.updated_at = time_now() + revert_pr_mappings.append(revert_pr_mp) + + return revert_pr_mappings + + def _get_revert_pr_mapping_for_revert_prs( + self, prs: List[PullRequest] + ) -> List[PullRequestRevertPRMapping]: + """ + This function takes a list of pull requests and for each pull request + checks if it is a revert pr or not. If it is a revert pr it tries to + create a mapping of that revert pr with the reverted pr and then returns + a list of those mappings + """ + + revert_pr_numbers: List[str] = [] + repo_ids: Set[str] = set() + repo_id_to_pr_number_to_id_map: Dict[str, Dict[str, str]] = {} + + for pr in prs: + revert_pr_number = self._get_revert_pr_number(pr.head_branch) + if revert_pr_number is None: + continue + + revert_pr_numbers.append(str(revert_pr_number)) + repo_ids.add(str(pr.repo_id)) + + if str(pr.repo_id) not in repo_id_to_pr_number_to_id_map: + repo_id_to_pr_number_to_id_map[str(pr.repo_id)] = {} + + repo_id_to_pr_number_to_id_map[str(pr.repo_id)][ + str(revert_pr_number) + ] = pr.id + + if len(revert_pr_numbers) == 0: + return [] + + reverted_prs: List[PullRequest] = ( + self.code_repo_service.get_reverted_prs_by_numbers( + list(repo_ids), revert_pr_numbers + ) + ) + + revert_pr_mappings: List[PullRequestRevertPRMapping] = [] + for rev_pr in reverted_prs: + repo_key_exists = repo_id_to_pr_number_to_id_map.get(str(rev_pr.repo_id)) + if repo_key_exists is None: + continue + + original_pr_id = repo_id_to_pr_number_to_id_map[str(rev_pr.repo_id)].get( + str(rev_pr.number) + ) + if original_pr_id is None: + continue + + revert_pr_mp = PullRequestRevertPRMapping() + revert_pr_mp.pr_id = original_pr_id + revert_pr_mp.actor_type = PullRequestRevertPRMappingActorType.SYSTEM + revert_pr_mp.actor = None + revert_pr_mp.reverted_pr = rev_pr.id + revert_pr_mp.updated_at = time_now() + revert_pr_mappings.append(revert_pr_mp) + + return revert_pr_mappings + + def 
_get_revert_pr_number(self, branch_name: str) -> Optional[int]: + """ + Extract the PR number from revert branch names. + Common patterns: + - revert-123-feature-branch + - revert-pr-123 + - revert-feature-branch-123 + """ + if not branch_name: + return None + + # Pattern to match revert branches (similar to GitHub) + pattern = r"revert-(\d+)-\w+" + + match = re.search(pattern, branch_name.lower()) + if match: + try: + return int(match.group(1)) + except (ValueError, IndexError): + pass + + return None + + +def get_revert_prs_bitbucket_sync_handler() -> RevertPRsBitbucketSyncHandler: + return RevertPRsBitbucketSyncHandler(CodeRepoService()) diff --git a/backend/analytics_server/mhq/service/external_integrations_service.py b/backend/analytics_server/mhq/service/external_integrations_service.py index 9a4bcac5e..53518e8b9 100644 --- a/backend/analytics_server/mhq/service/external_integrations_service.py +++ b/backend/analytics_server/mhq/service/external_integrations_service.py @@ -3,7 +3,9 @@ from github.Organization import Organization as GithubOrganization from mhq.exapi.models.gitlab import GitlabRepo, GitlabUser +from mhq.exapi.models.bitbucket import BitbucketRepo from mhq.exapi.gitlab import GitlabApiService +from mhq.exapi.bitbucket import BitbucketApiService from mhq.utils.log import LOG from mhq.exapi.github import GithubApiService from mhq.store.models import UserIdentityProvider @@ -98,6 +100,38 @@ def get_gitlab_user_projects(self, page_size: int, page: int) -> List[GitlabRepo return projects + def get_bitbucket_workspace_repo( + self, workspace: str, repo_slug: str + ) -> BitbucketRepo: + bitbucket_api_service = BitbucketApiService(self.access_token) + try: + repo: BitbucketRepo = bitbucket_api_service.get_workspace_repos( + workspace, repo_slug + ) + except Exception as e: + raise e + return repo + + def get_bitbucket_workspaces(self) -> List[Dict]: + bitbucket_api_service = BitbucketApiService(self.access_token) + try: + workspaces: List[Dict] = 
bitbucket_api_service.get_user_workspaces() + except Exception as e: + raise e + return workspaces + + def get_bitbucket_workspace_repositories( + self, workspace: str, page_size: int = 50 + ) -> List[BitbucketRepo]: + bitbucket_api_service = BitbucketApiService(self.access_token) + try: + repositories: List[BitbucketRepo] = bitbucket_api_service.get_workspace_repositories( + workspace, page_size + ) + except Exception as e: + raise e + return repositories + def get_external_integrations_service( org_id: str, user_identity_provider: UserIdentityProvider diff --git a/backend/analytics_server/mhq/service/incidents/integration.py b/backend/analytics_server/mhq/service/incidents/integration.py index fd10e09a6..97d0f7892 100644 --- a/backend/analytics_server/mhq/service/incidents/integration.py +++ b/backend/analytics_server/mhq/service/incidents/integration.py @@ -6,7 +6,7 @@ from mhq.store.models.incidents import IncidentProvider, IncidentSource from mhq.store.repos.core import CoreRepoService -GIT_INCIDENT_INTEGRATION_BUCKET = [IncidentProvider.GITHUB.value] +GIT_INCIDENT_INTEGRATION_BUCKET = [IncidentProvider.GITHUB.value, IncidentProvider.BITBUCKET.value] class IncidentsIntegrationService: diff --git a/backend/analytics_server/mhq/service/incidents/sync/etl_incidents_factory.py b/backend/analytics_server/mhq/service/incidents/sync/etl_incidents_factory.py index 571178196..a42b447e0 100644 --- a/backend/analytics_server/mhq/service/incidents/sync/etl_incidents_factory.py +++ b/backend/analytics_server/mhq/service/incidents/sync/etl_incidents_factory.py @@ -16,4 +16,7 @@ def __call__(self, provider: str) -> IncidentsProviderETLHandler: if provider == IncidentProvider.GITLAB.value: return get_incidents_sync_etl_handler(self.org_id) + if provider == IncidentProvider.BITBUCKET.value: + return get_incidents_sync_etl_handler(self.org_id) + raise NotImplementedError(f"Unknown provider - {provider}") diff --git a/backend/analytics_server/mhq/store/models/code/enums.py 
b/backend/analytics_server/mhq/store/models/code/enums.py index 5494e536f..6cb3cadbe 100644 --- a/backend/analytics_server/mhq/store/models/code/enums.py +++ b/backend/analytics_server/mhq/store/models/code/enums.py @@ -4,6 +4,7 @@ class CodeProvider(Enum): GITHUB = "github" GITLAB = "gitlab" + BITBUCKET = "bitbucket" class CodeBookmarkType(Enum): diff --git a/backend/analytics_server/mhq/store/models/incidents/enums.py b/backend/analytics_server/mhq/store/models/incidents/enums.py index 6b46cf80d..9a2c2babd 100644 --- a/backend/analytics_server/mhq/store/models/incidents/enums.py +++ b/backend/analytics_server/mhq/store/models/incidents/enums.py @@ -4,6 +4,7 @@ class IncidentProvider(Enum): GITHUB = "github" GITLAB = "gitlab" + BITBUCKET = "bitbucket" class IncidentSource(Enum): diff --git a/backend/analytics_server/mhq/store/models/integrations/enums.py b/backend/analytics_server/mhq/store/models/integrations/enums.py index 423bde139..5ba2f87b8 100644 --- a/backend/analytics_server/mhq/store/models/integrations/enums.py +++ b/backend/analytics_server/mhq/store/models/integrations/enums.py @@ -4,6 +4,7 @@ class UserIdentityProvider(Enum): GITHUB = "github" GITLAB = "gitlab" + BITBUCKET = "bitbucket" @classmethod def get_enum(self, provider: str): diff --git a/backend/analytics_server/mhq/utils/time.py b/backend/analytics_server/mhq/utils/time.py index a16313504..8140f90cf 100644 --- a/backend/analytics_server/mhq/utils/time.py +++ b/backend/analytics_server/mhq/utils/time.py @@ -273,5 +273,34 @@ def fill_missing_week_buckets( def dt_from_iso_time_string(j_str_dt) -> Optional[datetime]: if not j_str_dt: return None - dt_without_timezone = datetime.strptime(j_str_dt, "%Y-%m-%dT%H:%M:%S.%f%z") - return dt_without_timezone.astimezone(pytz.UTC) + + # List of common datetime formats used by different APIs + formats = [ + "%Y-%m-%dT%H:%M:%S.%f%z", # With microseconds and timezone + "%Y-%m-%dT%H:%M:%S%z", # Without microseconds but with timezone + "%Y-%m-%dT%H:%M:%S.%fZ", 
# With microseconds, Z timezone + "%Y-%m-%dT%H:%M:%SZ", # Without microseconds, Z timezone + "%Y-%m-%dT%H:%M:%S", # Without timezone + ] + + for fmt in formats: + try: + if fmt.endswith('%z'): + dt_without_timezone = datetime.strptime(j_str_dt, fmt) + return dt_without_timezone.astimezone(pytz.UTC) + elif fmt.endswith('Z'): + # Replace Z with +00:00 for proper parsing + j_str_dt_fixed = j_str_dt.replace('Z', '+00:00') + dt_without_timezone = datetime.strptime(j_str_dt_fixed, fmt.replace('Z', '%z')) + return dt_without_timezone.astimezone(pytz.UTC) + else: + # Assume UTC if no timezone info + dt_without_timezone = datetime.strptime(j_str_dt, fmt) + return dt_without_timezone.replace(tzinfo=pytz.UTC) + except ValueError: + continue + + # If none of the formats work, log an error and return None + from mhq.utils.log import LOG + LOG.warning(f"Could not parse datetime string: {j_str_dt}") + return None diff --git a/backend/analytics_server/tests/service/code/sync/test_etl_bitbucket_handler.py b/backend/analytics_server/tests/service/code/sync/test_etl_bitbucket_handler.py new file mode 100644 index 000000000..d5f7590d1 --- /dev/null +++ b/backend/analytics_server/tests/service/code/sync/test_etl_bitbucket_handler.py @@ -0,0 +1,135 @@ +from datetime import datetime +import uuid +import pytz +from unittest.mock import Mock + +from mhq.service.code.sync.etl_bitbucket_handler import BitbucketETLHandler +from mhq.exapi.models.bitbucket import BitbucketPR, BitbucketPRState +from mhq.store.models.code import PullRequestState +from mhq.store.models import UserIdentityProvider +from mhq.utils.string import uuid4_str + +ORG_ID = uuid4_str() + + +def test__to_pr_model_given_a_bitbucket_pr_returns_new_pr_model(): + """Test that BitbucketPR is correctly converted to PullRequest model.""" + repo_id = uuid4_str() + number = 123 + author = "test_user" + merged_at = datetime(2022, 6, 29, 10, 53, 15, tzinfo=pytz.UTC) + head_branch = "feature" + base_branch = "main" + title = "Test PR" + 
review_comments = 2 + + # Create a mock Bitbucket PR data structure + pr_data = { + "id": number, + "title": title, + "links": {"html": {"href": f"https://bitbucket.org/workspace/repo/pull-requests/{number}"}}, + "author": {"display_name": author}, + "reviewers": [{"display_name": "reviewer1"}], + "state": "MERGED", + "destination": {"branch": {"name": base_branch}}, + "source": {"branch": {"name": head_branch}}, + "created_on": "2022-06-29T10:53:15+00:00", + "updated_on": "2022-06-29T11:53:15+00:00", + "merge_commit": {"hash": "abcd1234"} + } + + bitbucket_pr = BitbucketPR(pr_data) + + # Mock diff stats + diff_stats = { + "additions": 10, + "deletions": 5, + "changed_files": 2 + } + + bitbucket_etl_handler = BitbucketETLHandler( + ORG_ID, + Mock(), # bitbucket_api_service + Mock(), # code_repo_service + Mock(), # code_etl_analytics_service + Mock() # bitbucket_revert_pr_sync_handler + ) + pr_model = bitbucket_etl_handler._to_pr_model( + pr=bitbucket_pr, + pr_model=None, + repo_id=repo_id, + review_comments=review_comments, + diff_stats=diff_stats, + ) + + # Assertions + assert pr_model.number == str(number) + assert pr_model.title == title + assert pr_model.author == author + assert pr_model.state == PullRequestState.MERGED + assert pr_model.base_branch == base_branch + assert pr_model.head_branch == head_branch + assert pr_model.repo_id == repo_id + assert pr_model.provider == UserIdentityProvider.BITBUCKET.value + assert pr_model.merge_commit_sha == "abcd1234" + assert pr_model.meta["code_stats"]["additions"] == 10 + assert pr_model.meta["code_stats"]["deletions"] == 5 + assert pr_model.meta["code_stats"]["changed_files"] == 2 + assert pr_model.meta["code_stats"]["comments"] == review_comments + + +def test__get_state_converts_bitbucket_state_to_internal_state(): + """Test that Bitbucket PR states are correctly mapped to internal states.""" + bitbucket_etl_handler = BitbucketETLHandler( + ORG_ID, + Mock(), # bitbucket_api_service + Mock(), # code_repo_service + 
Mock(), # code_etl_analytics_service + Mock() # bitbucket_revert_pr_sync_handler + ) + + # Test MERGED state + pr_data_merged = {"id": 1, "state": "MERGED"} + pr_merged = BitbucketPR(pr_data_merged) + assert bitbucket_etl_handler._get_state(pr_merged) == PullRequestState.MERGED + + # Test DECLINED state + pr_data_declined = {"id": 2, "state": "DECLINED"} + pr_declined = BitbucketPR(pr_data_declined) + assert bitbucket_etl_handler._get_state(pr_declined) == PullRequestState.CLOSED + + # Test SUPERSEDED state + pr_data_superseded = {"id": 3, "state": "SUPERSEDED"} + pr_superseded = BitbucketPR(pr_data_superseded) + assert bitbucket_etl_handler._get_state(pr_superseded) == PullRequestState.CLOSED + + # Test OPEN state + pr_data_open = {"id": 4, "state": "OPEN"} + pr_open = BitbucketPR(pr_data_open) + assert bitbucket_etl_handler._get_state(pr_open) == PullRequestState.OPEN + + +def test__get_merge_commit_sha_returns_correct_sha(): + """Test that merge commit SHA is correctly extracted.""" + bitbucket_etl_handler = BitbucketETLHandler( + ORG_ID, + Mock(), # bitbucket_api_service + Mock(), # code_repo_service + Mock(), # code_etl_analytics_service + Mock() # bitbucket_revert_pr_sync_handler + ) + + # Test with merge commit present + raw_data_with_merge = {"merge_commit": {"hash": "abcd1234"}} + sha = bitbucket_etl_handler._get_merge_commit_sha(raw_data_with_merge, PullRequestState.MERGED) + assert sha == "abcd1234" + + # Test with no merge commit (non-merged PR) + raw_data_no_merge = {} + sha = bitbucket_etl_handler._get_merge_commit_sha(raw_data_no_merge, PullRequestState.OPEN) + assert sha is None + + # Test with non-merged state + raw_data_closed = {"merge_commit": {"hash": "abcd1234"}} + sha = bitbucket_etl_handler._get_merge_commit_sha(raw_data_closed, PullRequestState.CLOSED) + assert sha is None diff --git a/web-server/pages/api/integrations/bitbucket/scopes.ts b/web-server/pages/api/integrations/bitbucket/scopes.ts new file mode 100644 index 000000000..5dfd6f7b0 
--- /dev/null +++ b/web-server/pages/api/integrations/bitbucket/scopes.ts @@ -0,0 +1,115 @@ +import axios from 'axios'; +import * as yup from 'yup'; + +import { Endpoint, nullSchema } from '@/api-helpers/global'; + +const payloadSchema = yup.object({ + email: yup + .string() + .required('Email is required') + .email('Please enter a valid email address') + .trim() + .min(1, 'Email cannot be empty') + .max(100, 'Email too long'), + apiToken: yup + .string() + .required('API token is required') + .min(1, 'API token cannot be empty') + .max(500, 'API token too long') +}); + +const endpoint = new Endpoint(nullSchema); + +endpoint.handle.POST(payloadSchema, async (req, res) => { + try { + const { email, apiToken } = req.payload; + + // Basic email validation + if (!email?.trim() || !apiToken?.trim()) { + return res.status(400).json({ + message: 'Email and API token are required' + }); + } + + // Change this to the Atlassian Bitbucket Cloud REST API endpoint + const url = 'https://api.bitbucket.org/2.0/user'; + + const response = await axios({ + url, + method: 'GET', + headers: { + Authorization: `Basic ${Buffer.from( + `${email.trim()}:${apiToken}` + ).toString('base64')}`, + 'User-Agent': 'MiddlewareApp/1.0', + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + timeout: 10000 + }); + + if (!response.headers) { + return res.status(400).json({ + message: 'Unable to retrieve permission information from BitBucket' + }); + } + + // Validate that we received user data + if (!response.data || typeof response.data !== 'object') { + return res.status(400).json({ + message: 'Invalid response from BitBucket API' + }); + } + + // Check for required user fields to ensure authentication was successful + if (!response.data.uuid || !response.data.username) { + return res.status(400).json({ + message: 'Bitbucket authentication successful but user data incomplete' + }); + } + + res.status(200).json({ + data: response.data, + headers: response.headers + }); + } catch 
(error: any) { + console.error('Error fetching Bitbucket user:', { + message: error.message, + status: error.response?.status, + hasCredentials: !!(req.payload?.email && req.payload?.apiToken), + url: 'https://api.bitbucket.org/2.0/user' + }); + + const status = error.response?.status || 500; + let message = 'Internal Server Error'; + + switch (status) { + case 401: + message = + 'Invalid Bitbucket credentials. Please check your email and API Token.'; + break; + case 403: + message = + 'Access forbidden. Check your API Token permissions or ensure it has not expired.'; + break; + case 404: + message = 'Bitbucket user not found. Please verify your email.'; + break; + case 429: + message = 'Rate limit exceeded. Please try again later.'; + break; + case 400: + message = 'Bad request. Please check your credentials format.'; + break; + default: + message = + error.response?.data?.error?.message || + error.message || + 'Failed to validate Bitbucket credentials'; + } + + res.status(status).json({ message }); + } +}); + +export default endpoint.serve(); diff --git a/web-server/pages/api/internal/[org_id]/git_provider_org.ts b/web-server/pages/api/internal/[org_id]/git_provider_org.ts index fae050e8a..dade0f0e2 100644 --- a/web-server/pages/api/internal/[org_id]/git_provider_org.ts +++ b/web-server/pages/api/internal/[org_id]/git_provider_org.ts @@ -1,6 +1,6 @@ import * as yup from 'yup'; -import { gitlabSearch, searchGithubRepos } from '@/api/internal/[org_id]/utils'; +import { gitlabSearch, searchGithubRepos, bitbucketSearch } from '@/api/internal/[org_id]/utils'; import { Endpoint } from '@/api-helpers/global'; import { Integration } from '@/constants/integrations'; import { dec } from '@/utils/auth-supplementary'; @@ -68,6 +68,18 @@ const getGitlabToken = async (org_id: ID) => { .then((r) => dec(r.access_token_enc_chunks)); }; +const getBitbucketToken = async (org_id: ID) => { + return await db('Integration') + .select() + .where({ + org_id, + name: Integration.BITBUCKET 
+ }) + .returning('*') + .then(getFirstRow) + .then((r) => dec(r.access_token_enc_chunks)); +}; + const fetchMap = [ { provider: Integration.GITHUB, @@ -78,5 +90,10 @@ const fetchMap = [ provider: Integration.GITLAB, search: gitlabSearch, getToken: getGitlabToken + }, + { + provider: Integration.BITBUCKET, + search: bitbucketSearch, + getToken: getBitbucketToken } ]; diff --git a/web-server/pages/api/internal/[org_id]/utils.ts b/web-server/pages/api/internal/[org_id]/utils.ts index 3503c0116..8a4658132 100644 --- a/web-server/pages/api/internal/[org_id]/utils.ts +++ b/web-server/pages/api/internal/[org_id]/utils.ts @@ -269,6 +269,134 @@ export const gitlabSearch = async (pat: string, searchString: string) => { return searchGitlabRepos(pat, search); }; +// Bitbucket functions + +type BitbucketRepo = { + uuid: string; + name: string; + full_name: string; + description?: string; + language?: string; + mainbranch?: { + name: string; + }; + links: { + html: { + href: string; + }; + }; + owner: { + username: string; + }; +}; + +type BitbucketResponse = { + values: BitbucketRepo[]; + next?: string; +}; + +const BITBUCKET_API_URL = 'https://api.bitbucket.org/2.0'; + +export const searchBitbucketRepos = async ( + credentials: string, + searchString: string +): Promise => { + let urlString = convertUrlToQuery(searchString); + if (urlString !== searchString && urlString.includes('/')) { + try { + return await searchBitbucketRepoWithURL(credentials, urlString); + } catch (e) { + return await searchBitbucketReposWithNames(credentials, urlString); + } + } + return await searchBitbucketReposWithNames(credentials, urlString); +}; + +const searchBitbucketRepoWithURL = async ( + credentials: string, + searchString: string +): Promise => { + const apiUrl = `${BITBUCKET_API_URL}/repositories/${searchString}`; + + const response = await fetch(apiUrl, { + method: 'GET', + headers: { + Authorization: `Basic ${credentials}`, + 'Content-Type': 'application/json' + } + }); + + if 
(!response.ok) { + throw new Error(`Bitbucket API error: ${response.statusText}`); + } + + const repo = (await response.json()) as BitbucketRepo; + + return [ + { + id: repo.uuid.replace(/[{}]/g, ''), + name: repo.name, + desc: repo.description, + slug: repo.name, + parent: repo.owner.username, + web_url: repo.links.html.href, + branch: repo.mainbranch?.name, + language: repo.language, + provider: Integration.BITBUCKET + } + ] as BaseRepo[]; +}; + +const searchBitbucketReposWithNames = async ( + credentials: string, + searchString: string +): Promise => { + const apiUrl = `${BITBUCKET_API_URL}/repositories`; + const params = new URLSearchParams({ + q: `name~"${searchString}"`, + role: 'member', + pagelen: '50' + }); + + const response = await fetch(`${apiUrl}?${params}`, { + method: 'GET', + headers: { + Authorization: `Basic ${credentials}`, + 'Content-Type': 'application/json' + } + }); + + if (!response.ok) { + throw new Error(`Bitbucket API error: ${response.statusText}`); + } + + const responseBody = (await response.json()) as BitbucketResponse; + const repositories = responseBody.values || []; + + return repositories.map( + (repo) => + ({ + id: repo.uuid.replace(/[{}]/g, ''), + name: repo.name, + desc: repo.description, + slug: repo.name, + parent: repo.owner.username, + web_url: repo.links.html.href, + branch: repo.mainbranch?.name, + language: repo.language || null, + provider: Integration.BITBUCKET + }) as BaseRepo + ); +}; + +export const bitbucketSearch = async ( + credentials: string, + searchString: string +): Promise => { + let search = convertUrlToQuery(searchString); + return searchBitbucketRepos(credentials, search); +}; + const convertUrlToQuery = (url: string) => { let query = url; try { @@ -280,6 +408,7 @@ const convertUrlToQuery = (url: string) => { query = query.replace('http://', ''); query = query.replace('github.com/', ''); query = query.replace('gitlab.com/', ''); + query = query.replace('bitbucket.org/', ''); query = 
query.startsWith('www.') ? query.slice(4) : query; query = query.endsWith('/') ? query.slice(0, -1) : query; } @@ -303,4 +432,4 @@ const replaceURL = async (url: string): Promise => { } return url; -}; +}; \ No newline at end of file diff --git a/web-server/pages/api/resources/orgs/[org_id]/teams/v2.ts b/web-server/pages/api/resources/orgs/[org_id]/teams/v2.ts index bf0db917e..feda6a4a1 100644 --- a/web-server/pages/api/resources/orgs/[org_id]/teams/v2.ts +++ b/web-server/pages/api/resources/orgs/[org_id]/teams/v2.ts @@ -91,7 +91,7 @@ endpoint.handle.GET(getSchema, async (req, res) => { org_id, providers?.length ? (providers as Integration[]) - : [Integration.GITHUB, Integration.GITLAB] + : [Integration.GITHUB, Integration.GITLAB,Integration.BITBUCKET] ); res.send({ @@ -116,6 +116,7 @@ endpoint.handle.POST(postSchema, async (req, res) => { } as any as ReqRepoWithProvider); }); }, org_repos); + console.log('orgReposList in POST', orgReposList); const [team, onboardingState] = await Promise.all([ createTeam(org_id, name, []), getOnBoardingState(org_id) @@ -165,7 +166,7 @@ endpoint.handle.PATCH(patchSchema, async (req, res) => { } as any as ReqRepoWithProvider); }); }, org_repos); - +console.log('orgReposList in Patch', orgReposList); const [team] = await Promise.all([ updateTeam(id, name, []), handleRequest<(Row<'TeamRepos'> & Row<'OrgRepo'>)[]>(`/teams/${id}/repos`, { @@ -301,7 +302,7 @@ const updateReposWorkflows = async ( .whereIn('name', reposForWorkflows) .where('org_id', org_id) .andWhere('is_active', true) - .and.whereIn('provider', [Integration.GITHUB, Integration.GITLAB]); + .and.whereIn('provider', [Integration.GITHUB, Integration.GITLAB, Integration.BITBUCKET]); const groupedRepos = groupBy(dbReposForWorkflows, 'name'); diff --git a/web-server/pages/integrations.tsx b/web-server/pages/integrations.tsx index 2ec60be91..be03e2539 100644 --- a/web-server/pages/integrations.tsx +++ b/web-server/pages/integrations.tsx @@ -13,6 +13,7 @@ import { ROUTES } from 
'@/constants/routes'; import { FetchState } from '@/constants/ui-states'; import { GithubIntegrationCard } from '@/content/Dashboards/GithubIntegrationCard'; import { GitlabIntegrationCard } from '@/content/Dashboards/GitlabIntegrationCard'; +import { BitbucketIntegrationCard } from '@/content/Dashboards/BitbucketIntegrationCard'; import { PageWrapper } from '@/content/PullRequests/PageWrapper'; import { useAuth } from '@/hooks/useAuth'; import { useBoolState, useEasyState } from '@/hooks/useEasyState'; @@ -163,6 +164,7 @@ const Content = () => { + {showCreationCTA && ( diff --git a/web-server/public/assets/bitbucketPAT.png b/web-server/public/assets/bitbucketPAT.png new file mode 100644 index 000000000..ae85487f7 Binary files /dev/null and b/web-server/public/assets/bitbucketPAT.png differ diff --git a/web-server/src/api-helpers/axios.ts b/web-server/src/api-helpers/axios.ts index d5b031fe7..7836dcace 100644 --- a/web-server/src/api-helpers/axios.ts +++ b/web-server/src/api-helpers/axios.ts @@ -83,19 +83,28 @@ const bffInterceptor = loggerInterceptor('bff'); internal.interceptors.request.use(bffInterceptor); -export const handleRequest = ( +export const handleRequest = ( url: string, - params: AxiosRequestConfig = { method: 'get' } -): Promise => + params: AxiosRequestConfig = { method: 'get' }, + includeHeaders: B = false as B +): Promise => internal({ url, ...params, - headers: { 'Content-Type': 'application/json' } + headers: { + 'Content-Type': 'application/json', + ...params.headers + } }) - .then(handleThen) + .then((r: any) => handleThen(r, includeHeaders)) .catch(handleCatch); -export const handleThen = (r: AxiosResponse) => r.data; +export const handleThen = ( + r: AxiosResponse, + includeHeaders: B = false as B +): B extends true ? { data: T; headers: any } : T => + (includeHeaders ? 
{ data: r.data, headers: r.headers } : r.data) as any; + export const handleCatch = (r: { response: AxiosResponse }) => { throw r.response; }; diff --git a/web-server/src/components/Teams/CreateTeams.tsx b/web-server/src/components/Teams/CreateTeams.tsx index 726c55c27..d28a2afdc 100644 --- a/web-server/src/components/Teams/CreateTeams.tsx +++ b/web-server/src/components/Teams/CreateTeams.tsx @@ -31,6 +31,7 @@ import { DeploymentWorkflowSelector } from '@/components/WorkflowSelector'; import { Integration } from '@/constants/integrations'; import { useBoolState, useEasyState } from '@/hooks/useEasyState'; import GitlabIcon from '@/mocks/icons/gitlab.svg'; +import BitbucketIcon from '@/mocks/icons/bitbucket.svg' import { BaseRepo, DeploymentSources } from '@/types/resources'; import { trimWithEllipsis } from '@/utils/stringFormatting'; @@ -257,7 +258,7 @@ const TeamRepos: FC = () => { )} renderOption={(props, option, { selected }) => (
  • - { textOverflow: 'ellipsis', overflow: 'hidden' }} - > + > - ) : undefined + checkOverflow(option) ? ( + + ) : undefined } > - {option.provider === Integration.GITHUB ? ( - - ) : ( - - )} - - {addEllipsis(option.parent, MAX_LENGTH_PARENT_NAME)} - + {option.provider === Integration.GITHUB ? ( + + ) : option.provider === Integration.BITBUCKET ? ( + + ) : ( + + )} + + {addEllipsis(option.parent, MAX_LENGTH_PARENT_NAME)} + {addEllipsis(option.name, MAX_LENGTH_REPO_NAME)} {selected ? : null} - +
  • )} renderTags={() => null} @@ -398,6 +401,8 @@ const DisplayRepos: FC = () => { > {repo.provider === Integration.GITHUB ? ( + ) : repo.provider === Integration.BITBUCKET ? ( + ) : ( )} diff --git a/web-server/src/content/Dashboards/BitbucketIntegrationCard.tsx b/web-server/src/content/Dashboards/BitbucketIntegrationCard.tsx new file mode 100644 index 000000000..798579adf --- /dev/null +++ b/web-server/src/content/Dashboards/BitbucketIntegrationCard.tsx @@ -0,0 +1,247 @@ +import { + ArrowForwardIosRounded, + ChevronRightRounded, + SettingsRounded + } from '@mui/icons-material'; + import { Button, useTheme } from '@mui/material'; + import CircularProgress from '@mui/material/CircularProgress'; + import { useSnackbar } from 'notistack'; + import { FC, ReactNode, useEffect } from 'react'; + + import { FlexBox } from '@/components/FlexBox'; + import { Line } from '@/components/Text'; + import { track } from '@/constants/events'; + import { FetchState } from '@/constants/ui-states'; + import { bitBucketIntegrationDisplay } from '@/content/Dashboards/githubIntegration'; + import { useIntegrationHandlers } from '@/content/Dashboards/useIntegrationHandlers'; + import { useAuth } from '@/hooks/useAuth'; + import { useBoolState } from '@/hooks/useEasyState'; + import { fetchCurrentOrg } from '@/slices/auth'; + import { useDispatch, useSelector } from '@/store'; + + const cardRadius = 10.5; + const cardBorder = 1.5; + const getRadiusWithPadding = (radius: number, padding: number) => + `${radius + padding}px`; + + export const BitbucketIntegrationCard = () => { + const theme = useTheme(); + const { integrations } = useAuth(); + const isBitbucketIntegrated = integrations.bitbucket; + const sliceLoading = useSelector( + (s: { auth: { requests: { org: FetchState; }; }; }) => s.auth.requests.org === FetchState.REQUEST + ); + const { link, unlink } = useIntegrationHandlers(); + + const localLoading = useBoolState(false); + + const isLoading = sliceLoading || localLoading.value; 
+ + const dispatch = useDispatch(); + + const { enqueueSnackbar } = useSnackbar(); + + return ( + + {isBitbucketIntegrated && ( + + + + )} + + + + + + {bitBucketIntegrationDisplay.icon} + + + {bitBucketIntegrationDisplay.name} + + + + { + track( + isBitbucketIntegrated + ? 'INTEGRATION_UNLINK_TRIGGERED' + : 'INTEGRATION_LINK_TRIGGERED', + { integration_name: bitBucketIntegrationDisplay.name } + ); + if (!isBitbucketIntegrated) { + link.bitbucket(); + return; + } + const shouldExecute = window.confirm( + 'Are you sure you want to unlink?' + ); + if (shouldExecute) { + localLoading.true(); + await unlink + .bitbucket() + .then(() => { + enqueueSnackbar('Bitbucket unlinked successfully', { + variant: 'success' + }); + }) + .then(async () => dispatch(fetchCurrentOrg())) + .catch((e: any) => { + console.error('Failed to unlink Bitbucket', e); + enqueueSnackbar('Failed to unlink Bitbucket', { + variant: 'error' + }); + }) + .finally(localLoading.false); + } + }} + label={!isBitbucketIntegrated ? 'Link' : 'Unlink'} + bgOpacity={!isBitbucketIntegrated ? 0.45 : 0.25} + endIcon={ + isLoading ? 
( + + ) : ( + + ) + } + minWidth="72px" + /> + + + + + ); + }; + + const IntegrationActionsButton: FC<{ + onClick: () => void | Promise; + label: ReactNode; + bgOpacity?: number; + startIcon?: ReactNode; + endIcon?: ReactNode; + minWidth?: string; + }> = ({ + label, + onClick, + bgOpacity = 0.45, + endIcon = ( + + ), + startIcon = , + minWidth = '80px' + }) => { + const theme = useTheme(); + + return ( + + ); + }; + + const LinkedIcon = () => { + const isVisible = useBoolState(false); + useEffect(() => { + setTimeout(isVisible.true, 200); + }, [isVisible.true]); + return ( + + + + + + + + + + + ); + }; diff --git a/web-server/src/content/Dashboards/ConfigureBitbucketModalBody.tsx b/web-server/src/content/Dashboards/ConfigureBitbucketModalBody.tsx new file mode 100644 index 000000000..4a3bbf7ee --- /dev/null +++ b/web-server/src/content/Dashboards/ConfigureBitbucketModalBody.tsx @@ -0,0 +1,332 @@ +import { LoadingButton } from '@mui/lab'; +import { Divider, Link, TextField, alpha } from '@mui/material'; +import Image from 'next/image'; +import { useSnackbar } from 'notistack'; +import { FC, useCallback, useMemo } from 'react'; + +import { FlexBox } from '@/components/FlexBox'; +import { Line } from '@/components/Text'; +import { Integration } from '@/constants/integrations'; +import { useAuth } from '@/hooks/useAuth'; +import { useBoolState, useEasyState } from '@/hooks/useEasyState'; +import { fetchCurrentOrg } from '@/slices/auth'; +import { fetchTeams } from '@/slices/team'; +import { useDispatch } from '@/store'; +import { linkProvider, checkBitBucketValidity } from '@/utils/auth'; +import { depFn } from '@/utils/fn'; + +interface ConfigureBitbucketModalBodyProps { + onClose: () => void; +} + +interface FormErrors { + email: string; + token: string; +} + +export const ConfigureBitbucketModalBody: FC< + ConfigureBitbucketModalBodyProps +> = ({ onClose }) => { + const email = useEasyState(''); + const token = useEasyState(''); + const { orgId } = useAuth(); + 
const { enqueueSnackbar } = useSnackbar(); + const dispatch = useDispatch(); + const isLoading = useBoolState(); + + const showEmailError = useEasyState(''); + const showTokenError = useEasyState(''); + + const setEmailError = useCallback( + (err: string) => depFn(showEmailError.set, err), + [showEmailError.set] + ); + const setTokenError = useCallback( + (err: string) => depFn(showTokenError.set, err), + [showTokenError.set] + ); + + const clearErrors = useCallback(() => { + showEmailError.set(''); + showTokenError.set(''); + }, [showEmailError, showTokenError]); + + const validateForm = useCallback((): FormErrors => { + const errors: FormErrors = { email: '', token: '' }; + + if (!email.value.trim()) { + errors.email = 'Please enter your Bitbucket email'; + } + + if (!token.value.trim()) { + errors.token = 'Please enter your API Token'; + } + + return errors; + }, [email.value, token.value]); + + const handleEmailChange = useCallback( + (val: string) => { + email.set(val); + if (showEmailError.value) { + showEmailError.set(''); + } + }, + [email, showEmailError] + ); + + const handleTokenChange = useCallback( + (val: string) => { + token.set(val); + if (showTokenError.value) { + showTokenError.set(''); + } + }, + [token, showTokenError] + ); + + const handleSubmission = useCallback(async () => { + clearErrors(); + + const errors = validateForm(); + if (errors.email || errors.token) { + if (errors.email) setEmailError(errors.email); + if (errors.token) setTokenError(errors.token); + return; + } + + depFn(isLoading.true); + + try { + const res = await checkBitBucketValidity(email.value.trim(), token.value); + + // const scopeHeader = + // res.data.headers?.['X-Oauth-Scopes'] || + // res.data.headers?.['x-oauth-scopes']; + // console.log(scopeHeader); + // if (!scopeHeader) { + // throw new Error( + // 'Unable to verify API Token permissions. Please ensure your API Token has the required scopes.' 
+ // ); + // } + + // const scopes = scopeHeader + // .split(',') + // .map((s: string) => s.trim()) + // .filter(Boolean); + // const missing = getMissingBitBucketScopes(scopes); + + // if (missing.length > 0) { + // throw new Error( + // `API Token is missing required scopes: ${missing.join( + // ', ' + // )}. Please regenerate with all required permissions.` + // ); + // } + + const encodedCredentials = btoa(`${email.value.trim()}:${token.value}`); + await linkProvider(encodedCredentials, orgId, Integration.BITBUCKET, { + email: email.value.trim() + }); + + await Promise.all([ + dispatch(fetchCurrentOrg()), + dispatch(fetchTeams({ org_id: orgId })) + ]); + + enqueueSnackbar('Bitbucket linked successfully', { + variant: 'success', + autoHideDuration: 3000 + }); + + onClose(); + } catch (err: any) { + console.error('Error linking Bitbucket:', err); + + const errorMessage = + err.message || 'Failed to link Bitbucket. Please try again.'; + + // Categorize errors for better UX + if ( + errorMessage.toLowerCase().includes('email') || + errorMessage.toLowerCase().includes('user not found') + ) { + setEmailError(errorMessage); + } else if ( + errorMessage.toLowerCase().includes('token') || + errorMessage.toLowerCase().includes('unauthorized') || + errorMessage.toLowerCase().includes('authentication') + ) { + setTokenError('Invalid API Token. 
Please check your credentials.'); + } else if (errorMessage.toLowerCase().includes('scope')) { + setTokenError(errorMessage); + } else { + setTokenError(errorMessage); + } + } finally { + depFn(isLoading.false); + } + }, [ + clearErrors, + validateForm, + email.value, + token.value, + isLoading, + setEmailError, + setTokenError, + orgId, + dispatch, + enqueueSnackbar, + onClose + ]); + + const handleKeyDown = useCallback( + (e: React.KeyboardEvent, action: 'focus-token' | 'submit') => { + if (e.key === 'Enter') { + e.preventDefault(); + if (action === 'focus-token') { + document.getElementById('bitbucket-token')?.focus(); + } else { + handleSubmission(); + } + } + }, + [handleSubmission] + ); + + return ( + + + + handleEmailChange(e.currentTarget.value)} + onKeyDown={(e) => handleKeyDown(e, 'focus-token')} + disabled={isLoading.value} + fullWidth + autoComplete="email" + /> + handleTokenChange(e.currentTarget.value)} + onKeyDown={(e) => handleKeyDown(e, 'submit')} + disabled={isLoading.value} + fullWidth + autoComplete="current-password" + /> + + + + + Generate an API Token{' '} + + here + + + + + Link Bitbucket + + + + + + + ); +}; + +const TokenPermissions: FC = () => { + const imageLoaded = useBoolState(false); + + const expandedStyles = useMemo(() => { + const base = { + border: `2px solid ${alpha('#2684FF', 0.6)}`, + transition: 'all 0.8s ease', + borderRadius: '8px', + opacity: 1, + width: '126px', + position: 'absolute' as const, + maxWidth: 'calc(100% - 48px)', + left: '12px' + }; + + const positions = [ + { top: '300px', height: '32px' }, + { top: '360px', height: '32px' }, + { top: '420px', height: '32px' } + ]; + + return positions.map((cfg) => ({ ...cfg, ...base })); + }, []); + + return ( + +
    + Bitbucket App Password required permissions setup + + {imageLoaded.value && + expandedStyles.map((style, index) => ( + + ))} + + {!imageLoaded.value && ( + + Loading permissions guide... + + )} +
    + + Scroll to see all required permissions + +
    + ); +}; diff --git a/web-server/src/content/Dashboards/githubIntegration.tsx b/web-server/src/content/Dashboards/githubIntegration.tsx index 4a6cad422..e5c584a20 100644 --- a/web-server/src/content/Dashboards/githubIntegration.tsx +++ b/web-server/src/content/Dashboards/githubIntegration.tsx @@ -2,6 +2,7 @@ import faker from '@faker-js/faker'; import { GitHub } from '@mui/icons-material'; import GitlabIcon from '@/mocks/icons/gitlab.svg'; +import BitbucketIcon from '@/mocks/icons/bitbucket.svg' export const githubIntegrationsDisplay = { id: faker.datatype.uuid(), @@ -23,4 +24,14 @@ export const gitLabIntegrationDisplay = { icon: } as IntegrationItem; +export const bitBucketIntegrationDisplay = { + id: faker.datatype.uuid(), + type: 'bitbucket', + name: 'BitBucket', + description: 'Code insights & blockers', + color: '#fff', + bg: `linear-gradient(-45deg, rgba(69, 110, 232, 0.6) 0%, rgba(24, 176, 236, 0.6) 100%)`, + icon: +} as IntegrationItem; + export type IntegrationItem = typeof githubIntegrationsDisplay; diff --git a/web-server/src/content/Dashboards/useIntegrationHandlers.tsx b/web-server/src/content/Dashboards/useIntegrationHandlers.tsx index 0ded2063c..e5f52e46a 100644 --- a/web-server/src/content/Dashboards/useIntegrationHandlers.tsx +++ b/web-server/src/content/Dashboards/useIntegrationHandlers.tsx @@ -7,6 +7,7 @@ import { useAuth } from '@/hooks/useAuth'; import { unlinkProvider } from '@/utils/auth'; import { ConfigureGithubModalBody } from './ConfigureGithubModalBody'; +import { ConfigureBitbucketModalBody } from './ConfigureBitbucketModalBody'; export const useIntegrationHandlers = () => { const { orgId } = useAuth(); @@ -27,11 +28,18 @@ export const useIntegrationHandlers = () => { title: 'Configure Gitlab', body: , showCloseIcon: true + }), + bitbucket: () => + addModal({ + title: 'Configure Bitbucket', + body: , + showCloseIcon: true }) }, unlink: { github: () => unlinkProvider(orgId, Integration.GITHUB), - gitlab: () => unlinkProvider(orgId, 
Integration.GITLAB)
+      gitlab: () => unlinkProvider(orgId, Integration.GITLAB),
+      bitbucket: () => unlinkProvider(orgId, Integration.BITBUCKET)
     }
   };
diff --git a/web-server/src/utils/auth.ts b/web-server/src/utils/auth.ts
index 4cc2d93bb..ad64fd5f0 100644
--- a/web-server/src/utils/auth.ts
+++ b/web-server/src/utils/auth.ts
@@ -89,3 +89,117 @@ export const getMissingGitLabScopes = (scopes: string[]): string[] => {
   );
   return missingScopes;
 };
+
+// BitBucket Functions
+interface BitBucketValidationResponse {
+  // NOTE(review): generic parameters restored here — extraction had stripped
+  // the `<...>` spans; `Record<string, string>` matches HTTP header maps.
+  headers: Record<string, string>;
+  data?: any;
+}
+
+interface BitBucketCredentials {
+  email: string;
+  apiToken: string;
+}
+
+/**
+ * Validates a Bitbucket email + API token pair via the backend proxy
+ * endpoint. Resolves with the proxied Bitbucket response (headers + data);
+ * throws an Error carrying a user-presentable message on any failure.
+ */
+export const checkBitBucketValidity = async (
+  email: string,
+  apiToken: string
+): Promise<BitBucketValidationResponse> => {
+  if (!email?.trim() || !apiToken?.trim()) {
+    throw new Error('Email and API Token are required');
+  }
+
+  // Basic email validation
+  const trimmedEmail = email.trim();
+  const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
+  if (!emailRegex.test(trimmedEmail)) {
+    throw new Error('Please enter a valid email address.');
+  }
+
+  try {
+    const response = await axios.post(
+      '/api/integrations/bitbucket/scopes',
+      {
+        email: trimmedEmail,
+        apiToken: apiToken
+      } as BitBucketCredentials,
+      {
+        headers: {
+          'Content-Type': 'application/json'
+        },
+        timeout: 15000 // Increased timeout for better reliability
+      }
+    );
+
+    // Validate response structure
+    if (!response.data || !response.data.headers) {
+      throw new Error('Invalid response from BitBucket API');
+    }
+
+    return response.data;
+  } catch (error: any) {
+    if (error.code === 'ECONNABORTED') {
+      throw new Error(
+        'Request timeout. Please check your internet connection and try again.'
+      );
+    }
+
+    if (error.response?.status === 401) {
+      throw new Error(
+        'Invalid email or API Token. Please verify your credentials.'
+      );
+    }
+
+    if (error.response?.status === 403) {
+      throw new Error(
+        'Access forbidden. Please ensure your API Token has the required permissions.'
+      );
+    }
+
+    if (error.response?.status >= 500) {
+      throw new Error(
+        'Bitbucket service is currently unavailable. Please try again later.'
+      );
+    }
+
+    const message =
+      error.response?.data?.message ||
+      error.message ||
+      'Unable to validate Bitbucket credentials. Please try again.';
+    throw new Error(message);
+  }
+};
+
+// Scopes the linked API token must carry for the integration to work.
+const BITBUCKET_SCOPES = [
+  'issue',
+  'pullrequest',
+  'project',
+  'account'
+] as const;
+
+/**
+ * Returns the required Bitbucket scopes missing from `userScopes`
+ * (comparison is trimmed and case-insensitive). A non-array input is
+ * treated as "no scopes granted" and yields the full required list.
+ */
+export const getMissingBitBucketScopes = (userScopes: string[]): string[] => {
+  if (!Array.isArray(userScopes)) {
+    return [...BITBUCKET_SCOPES];
+  }
+
+  const normalizedUserScopes = userScopes
+    .map((scope) => scope.trim().toLowerCase())
+    .filter(Boolean);
+
+  return BITBUCKET_SCOPES.filter(
+    (requiredScope) =>
+      !normalizedUserScopes.includes(requiredScope.toLowerCase())
+  );
+};