diff --git a/apiserver/app.py b/apiserver/app.py index fec4776f7..3f6d9ce65 100644 --- a/apiserver/app.py +++ b/apiserver/app.py @@ -1,4 +1,5 @@ from flask import Flask +from dora.store import configure_db_with_app from env import load_app_env load_app_env() @@ -21,6 +22,7 @@ app.register_blueprint(deployment_analytics_api) app.register_blueprint(integrations_api) +configure_db_with_app(app) initialize_database(app) diff --git a/apiserver/dora/store/__init__.py b/apiserver/dora/store/__init__.py index 30ac7ee05..45f48b876 100644 --- a/apiserver/dora/store/__init__.py +++ b/apiserver/dora/store/__init__.py @@ -1,37 +1,22 @@ from os import getenv -from sqlalchemy import create_engine -from sqlalchemy.orm import Session, declarative_base +from flask_sqlalchemy import SQLAlchemy -from dora.utils.log import LOG +db = SQLAlchemy() -DB_HOST = getenv("DB_HOST") -DB_PORT = getenv("DB_PORT") -DB_USER = getenv("DB_USER") -DB_PASS = getenv("DB_PASS") -DB_NAME = getenv("DB_NAME") -ENVIRONMENT = getenv("ENVIRONMENT") +def configure_db_with_app(app): -engine = create_engine( - f"postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}", - connect_args={"application_name": f"dora--{ENVIRONMENT}"}, -) -session = Session(engine) + DB_HOST = getenv("DB_HOST") + DB_PORT = getenv("DB_PORT") + DB_USER = getenv("DB_USER") + DB_PASS = getenv("DB_PASS") + DB_NAME = getenv("DB_NAME") + ENVIRONMENT = getenv("ENVIRONMENT", "local") -Base = declarative_base() + connection_uri = f"postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}?application_name=dora--{ENVIRONMENT}" - -def rollback_on_exc(func): - def wrapper(*args, **kwargs): - try: - return func(*args, **kwargs) - except Exception as e: - session.rollback() - LOG.error(f"Error in {func.__name__} - {str(e)}") - raise - finally: - # session.close() - pass - - return wrapper + app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False + app.config["SQLALCHEMY_DATABASE_URI"] = connection_uri + app.config["SQLALCHEMY_ENGINE_OPTIONS"] = {"pool_size": 20, "max_overflow": 5} + db.init_app(app) diff --git a/apiserver/dora/store/initialise_db.py b/apiserver/dora/store/initialise_db.py index 8846367a7..7439662b8 100644 --- a/apiserver/dora/store/initialise_db.py +++ b/apiserver/dora/store/initialise_db.py @@ -1,14 +1,13 @@ -from dora.store import rollback_on_exc, session +from dora.store import db from dora.store.models import Organization from dora.utils.string import uuid4_str from dora.utils.time import time_now -@rollback_on_exc def initialize_database(app): with app.app_context(): default_org = ( - session.query(Organization) + db.session.query(Organization) .filter(Organization.name == "default") .one_or_none() ) @@ -20,8 +19,8 @@ def initialize_database(app): domain="default", created_at=time_now(), ) - session.add(default_org) - session.commit() + db.session.add(default_org) + db.session.commit() if __name__ == "__main__": diff --git a/apiserver/dora/store/models/code/pull_requests.py b/apiserver/dora/store/models/code/pull_requests.py index d66f2aaf5..0be275602 100644 --- a/apiserver/dora/store/models/code/pull_requests.py +++ b/apiserver/dora/store/models/code/pull_requests.py @@ -1,10 +1,9 @@ from datetime import datetime -import pytz -from sqlalchemy import Column, String, DateTime, ForeignKey, Integer, func +from sqlalchemy import func from sqlalchemy.dialects.postgresql import UUID, JSONB, ARRAY, ENUM -from dora.store import Base +from dora.store import db from dora.store.models.code.enums import ( PullRequestEventType, PullRequestState, @@ -12,38 
+11,38 @@ ) -class PullRequest(Base): +class PullRequest(db.Model): __tablename__ = "PullRequest" - id = Column(UUID(as_uuid=True), primary_key=True) - repo_id = Column(UUID(as_uuid=True), ForeignKey("OrgRepo.id")) - title = Column(String) - url = Column(String) - number = Column(String) - author = Column(String) - state = Column(ENUM(PullRequestState)) - requested_reviews = Column(ARRAY(String)) - base_branch = Column(String) - head_branch = Column(String) - data = Column(JSONB) - created_at = Column(DateTime(timezone=True)) - updated_at = Column(DateTime(timezone=True)) - state_changed_at = Column(DateTime(timezone=True)) - first_response_time = Column(Integer) - rework_time = Column(Integer) - merge_time = Column(Integer) - cycle_time = Column(Integer) - reviewers = Column(ARRAY(String)) - meta = Column(JSONB) - provider = Column(String) - rework_cycles = Column(Integer, default=0) - first_commit_to_open = Column(Integer) - merge_to_deploy = Column(Integer) - lead_time = Column(Integer) - merge_commit_sha = Column(String) - created_in_db_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_in_db_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True) + repo_id = db.Column(UUID(as_uuid=True), db.ForeignKey("OrgRepo.id")) + title = db.Column(db.String) + url = db.Column(db.String) + number = db.Column(db.String) + author = db.Column(db.String) + state = db.Column(ENUM(PullRequestState)) + requested_reviews = db.Column(ARRAY(db.String)) + base_branch = db.Column(db.String) + head_branch = db.Column(db.String) + data = db.Column(JSONB) + created_at = db.Column(db.DateTime(timezone=True)) + updated_at = db.Column(db.DateTime(timezone=True)) + state_changed_at = db.Column(db.DateTime(timezone=True)) + first_response_time = db.Column(db.Integer) + rework_time = db.Column(db.Integer) + merge_time = db.Column(db.Integer) + cycle_time = db.Column(db.Integer) + reviewers = db.Column(ARRAY(db.String)) + meta = db.Column(JSONB) + provider = db.Column(db.String) + rework_cycles = db.Column(db.Integer, default=0) + first_commit_to_open = db.Column(db.Integer) + merge_to_deploy = db.Column(db.Integer) + lead_time = db.Column(db.Integer) + merge_commit_sha = db.Column(db.String) + created_in_db_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_in_db_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) def __eq__(self, other): @@ -80,20 +79,20 @@ def username(self) -> str: return self.meta.get("user_profile", {}).get("username", "") -class PullRequestEvent(Base): +class PullRequestEvent(db.Model): __tablename__ = "PullRequestEvent" - id = Column(UUID(as_uuid=True), primary_key=True) - pull_request_id = Column(UUID(as_uuid=True), ForeignKey("PullRequest.id")) - type = Column(ENUM(PullRequestEventType)) - data = Column(JSONB) - created_at = Column(DateTime(timezone=True)) - idempotency_key = Column(String) - org_repo_id = Column(UUID(as_uuid=True), ForeignKey("OrgRepo.id")) - actor_username = Column(String) - created_in_db_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_in_db_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True) + pull_request_id = db.Column(UUID(as_uuid=True), db.ForeignKey("PullRequest.id")) + type = db.Column(ENUM(PullRequestEventType)) + data = db.Column(JSONB) + created_at = 
db.Column(db.DateTime(timezone=True)) + idempotency_key = db.Column(db.String) + org_repo_id = db.Column(UUID(as_uuid=True), db.ForeignKey("OrgRepo.id")) + actor_username = db.Column(db.String) + created_in_db_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_in_db_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) @property @@ -107,42 +106,42 @@ def state(self): return "" -class PullRequestCommit(Base): +class PullRequestCommit(db.Model): __tablename__ = "PullRequestCommit" - hash = Column(String, primary_key=True) - pull_request_id = Column(UUID(as_uuid=True), ForeignKey("PullRequest.id")) - message = Column(String) - url = Column(String) - data = Column(JSONB) - author = Column(String) - created_at = Column(DateTime(timezone=True)) - org_repo_id = Column(UUID(as_uuid=True), ForeignKey("OrgRepo.id")) - created_in_db_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_in_db_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + hash = db.Column(db.String, primary_key=True) + pull_request_id = db.Column(UUID(as_uuid=True), db.ForeignKey("PullRequest.id")) + message = db.Column(db.String) + url = db.Column(db.String) + data = db.Column(JSONB) + author = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True)) + org_repo_id = db.Column(UUID(as_uuid=True), db.ForeignKey("OrgRepo.id")) + created_in_db_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_in_db_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) -class PullRequestRevertPRMapping(Base): +class PullRequestRevertPRMapping(db.Model): __tablename__ = "PullRequestRevertPRMapping" - pr_id = Column( + pr_id = db.Column( UUID(as_uuid=True), - ForeignKey("PullRequest.id"), + db.ForeignKey("PullRequest.id"), primary_key=True, nullable=False, ) - actor_type = Column( + actor_type = db.Column( ENUM(PullRequestRevertPRMappingActorType), primary_key=True, nullable=False ) - actor = Column(UUID(as_uuid=True), ForeignKey("Users.id")) - reverted_pr = Column( - UUID(as_uuid=True), ForeignKey("PullRequest.id"), nullable=False + actor = db.Column(UUID(as_uuid=True), db.ForeignKey("Users.id")) + reverted_pr = db.Column( + UUID(as_uuid=True), db.ForeignKey("PullRequest.id"), nullable=False ) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) def __hash__(self): diff --git a/apiserver/dora/store/models/code/repository.py b/apiserver/dora/store/models/code/repository.py index 620b85cd3..12ec95e18 100644 --- a/apiserver/dora/store/models/code/repository.py +++ b/apiserver/dora/store/models/code/repository.py @@ -3,10 +3,10 @@ from typing import Tuple import pytz -from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey, func +from sqlalchemy import func from sqlalchemy.dialects.postgresql import UUID, JSONB, ARRAY, ENUM -from dora.store import Base +from dora.store import db from dora.store.models.code.enums import ( CodeProvider, BookmarkType, @@ -14,24 +14,24 @@ ) -class OrgRepo(Base): +class OrgRepo(db.Model): __tablename__ = "OrgRepo" - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - org_id = 
Column(UUID(as_uuid=True), ForeignKey("Organization.id")) - name = Column(String) - provider = Column(String) - org_name = Column(String) - default_branch = Column(String) - language = Column(String) - contributors = Column(JSONB) - idempotency_key = Column(String) - slug = Column(String) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + org_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Organization.id")) + name = db.Column(db.String) + provider = db.Column(db.String) + org_name = db.Column(db.String) + default_branch = db.Column(db.String) + language = db.Column(db.String) + contributors = db.Column(JSONB) + idempotency_key = db.Column(db.String) + slug = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - is_active = Column(Boolean, default=True) + is_active = db.Column(db.Boolean, default=True) @property def url(self): @@ -51,50 +51,54 @@ def __hash__(self): return hash(self.id) -class TeamRepos(Base): +class TeamRepos(db.Model): __tablename__ = "TeamRepos" - team_id = Column(UUID(as_uuid=True), ForeignKey("Team.id"), primary_key=True) - org_repo_id = Column(UUID(as_uuid=True), ForeignKey("OrgRepo.id"), primary_key=True) - prod_branch = Column(String) - prod_branches = Column(ARRAY(String)) - deployment_type = Column( + team_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Team.id"), primary_key=True) + org_repo_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("OrgRepo.id"), primary_key=True + ) + prod_branch = db.Column(db.String) + prod_branches = db.Column(ARRAY(db.String)) + deployment_type = db.Column( ENUM(TeamReposDeploymentType), default=TeamReposDeploymentType.PR_MERGE ) - is_active = Column(Boolean, default=True) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + is_active = db.Column(db.Boolean, default=True) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) -class RepoSyncLogs(Base): +class RepoSyncLogs(db.Model): __tablename__ = "RepoSyncLogs" - repo_id = Column(UUID(as_uuid=True), ForeignKey("OrgRepo.id"), primary_key=True) - synced_at = Column(DateTime(timezone=True), server_default=func.now()) + repo_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("OrgRepo.id"), primary_key=True + ) + synced_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) -class Bookmark(Base): +class Bookmark(db.Model): __tablename__ = "Bookmark" - repo_id = Column(UUID(as_uuid=True), primary_key=True) - type = Column(ENUM(BookmarkType), primary_key=True) - bookmark = Column(String) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + repo_id = db.Column(UUID(as_uuid=True), primary_key=True) + type = db.Column(ENUM(BookmarkType), primary_key=True) + bookmark = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), 
onupdate=func.now() ) -class BookmarkMergeToDeployBroker(Base): +class BookmarkMergeToDeployBroker(db.Model): __tablename__ = "BookmarkMergeToDeployBroker" - repo_id = Column(UUID(as_uuid=True), primary_key=True) - bookmark = Column(String) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + repo_id = db.Column(UUID(as_uuid=True), primary_key=True) + bookmark = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) @property diff --git a/apiserver/dora/store/models/code/workflows/workflows.py b/apiserver/dora/store/models/code/workflows/workflows.py index 882039332..e02be6b1f 100644 --- a/apiserver/dora/store/models/code/workflows/workflows.py +++ b/apiserver/dora/store/models/code/workflows/workflows.py @@ -1,9 +1,9 @@ import uuid -from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey, Integer, func +from sqlalchemy import func from sqlalchemy.dialects.postgresql import UUID, JSONB, ENUM -from dora.store import Base +from dora.store import db from dora.store.models.code.workflows.enums import ( RepoWorkflowType, RepoWorkflowProviders, @@ -11,52 +11,52 @@ ) -class RepoWorkflow(Base): +class RepoWorkflow(db.Model): __tablename__ = "RepoWorkflow" - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - org_repo_id = Column(UUID(as_uuid=True), ForeignKey("OrgRepo.id")) - type = Column(ENUM(RepoWorkflowType)) - provider = Column(ENUM(RepoWorkflowProviders)) - provider_workflow_id = Column(String, nullable=False) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + org_repo_id = db.Column(UUID(as_uuid=True), db.ForeignKey("OrgRepo.id")) + type = db.Column(ENUM(RepoWorkflowType)) + provider = db.Column(ENUM(RepoWorkflowProviders)) + provider_workflow_id = db.Column(db.String, nullable=False) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - meta = Column(JSONB, default="{}") - is_active = Column(Boolean, default=True) - name = Column(String) + meta = db.Column(JSONB, default="{}") + is_active = db.Column(db.Boolean, default=True) + name = db.Column(db.String) -class RepoWorkflowRuns(Base): +class RepoWorkflowRuns(db.Model): __tablename__ = "RepoWorkflowRuns" - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - repo_workflow_id = Column(UUID(as_uuid=True), ForeignKey("RepoWorkflow.id")) - provider_workflow_run_id = Column(String, nullable=False) - event_actor = Column(String) - head_branch = Column(String) - status = Column(ENUM(RepoWorkflowRunsStatus)) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + repo_workflow_id = db.Column(UUID(as_uuid=True), db.ForeignKey("RepoWorkflow.id")) + provider_workflow_run_id = db.Column(db.String, nullable=False) + event_actor = db.Column(db.String) + head_branch = db.Column(db.String) + status = 
db.Column(ENUM(RepoWorkflowRunsStatus)) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - conducted_at = Column(DateTime(timezone=True), server_default=func.now()) - meta = Column(JSONB, default="{}") - duration = Column(Integer) - html_url = Column(String) + conducted_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + meta = db.Column(JSONB, default="{}") + duration = db.Column(db.Integer) + html_url = db.Column(db.String) def __hash__(self): return hash(self.id) -class RepoWorkflowRunsBookmark(Base): +class RepoWorkflowRunsBookmark(db.Model): __tablename__ = "RepoWorkflowRunsBookmark" - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - repo_workflow_id = Column(UUID(as_uuid=True), ForeignKey("RepoWorkflow.id")) - bookmark = Column(String) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + repo_workflow_id = db.Column(UUID(as_uuid=True), db.ForeignKey("RepoWorkflow.id")) + bookmark = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) diff --git a/apiserver/dora/store/models/core/organization.py b/apiserver/dora/store/models/core/organization.py index 04bb869ac..c275de442 100644 --- a/apiserver/dora/store/models/core/organization.py +++ b/apiserver/dora/store/models/core/organization.py @@ -1,21 +1,16 @@ -from sqlalchemy import ( - Column, - String, - DateTime, -) from sqlalchemy.dialects.postgresql import UUID, ARRAY -from dora.store import Base +from dora.store import db -class Organization(Base): +class Organization(db.Model): __tablename__ = "Organization" - id = Column(UUID(as_uuid=True), primary_key=True) - name = Column(String) - created_at = Column(DateTime(timezone=True)) - domain = Column(String) - other_domains = Column(ARRAY(String)) + id = db.Column(UUID(as_uuid=True), primary_key=True) + name = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True)) + domain = db.Column(db.String) + other_domains = db.Column(ARRAY(db.String)) def __eq__(self, other): diff --git a/apiserver/dora/store/models/core/teams.py b/apiserver/dora/store/models/core/teams.py index d4dff0d05..7a58b79b4 100644 --- a/apiserver/dora/store/models/core/teams.py +++ b/apiserver/dora/store/models/core/teams.py @@ -1,31 +1,26 @@ import uuid from sqlalchemy import ( - Column, - String, - DateTime, - ForeignKey, func, - Boolean, ) from sqlalchemy.dialects.postgresql import UUID, ARRAY -from dora.store import Base +from dora.store import db -class Team(Base): +class Team(db.Model): __tablename__ = "Team" - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - org_id = Column(UUID(as_uuid=True), ForeignKey("Organization.id")) - name = Column(String) - member_ids = Column(ARRAY(UUID(as_uuid=True)), nullable=False) - manager_id = Column(UUID(as_uuid=True), ForeignKey("Users.id")) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + org_id = db.Column(UUID(as_uuid=True), 
db.ForeignKey("Organization.id")) + name = db.Column(db.String) + member_ids = db.Column(ARRAY(UUID(as_uuid=True)), nullable=False) + manager_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Users.id")) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - is_deleted = Column(Boolean) + is_deleted = db.Column(db.Boolean) def __hash__(self): return hash(self.id) diff --git a/apiserver/dora/store/models/core/users.py b/apiserver/dora/store/models/core/users.py index 8b922c3ee..e34e005b8 100644 --- a/apiserver/dora/store/models/core/users.py +++ b/apiserver/dora/store/models/core/users.py @@ -1,26 +1,21 @@ from sqlalchemy import ( - Column, - String, - DateTime, - ForeignKey, func, - Boolean, ) from sqlalchemy.dialects.postgresql import UUID -from dora.store import Base +from dora.store import db -class Users(Base): +class Users(db.Model): __tablename__ = "Users" - id = Column(UUID(as_uuid=True), primary_key=True) - org_id = Column(UUID(as_uuid=True), ForeignKey("Organization.id")) - name = Column(String) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True) + org_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Organization.id")) + name = db.Column(db.String) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - primary_email = Column(String) - is_deleted = Column(Boolean, default=False) - avatar_url = Column(String) + primary_email = db.Column(db.String) + is_deleted = db.Column(db.Boolean, default=False) + avatar_url = db.Column(db.String) diff --git a/apiserver/dora/store/models/incidents/incidents.py b/apiserver/dora/store/models/incidents/incidents.py index bf0ee0921..464744e0c 100644 --- a/apiserver/dora/store/models/incidents/incidents.py +++ b/apiserver/dora/store/models/incidents/incidents.py @@ -1,64 +1,59 @@ from sqlalchemy import ( - Column, - String, - DateTime, - ForeignKey, func, - Integer, ) from sqlalchemy.dialects.postgresql import UUID, ARRAY, JSONB, ENUM -from dora.store import Base +from dora.store import db from dora.store.models.incidents.enums import IncidentType, IncidentBookmarkType -class Incident(Base): +class Incident(db.Model): __tablename__ = "Incident" - id = Column(UUID(as_uuid=True), primary_key=True) - provider = Column(String) - key = Column(String) - incident_number = Column(Integer) - title = Column(String) - status = Column(String) - creation_date = Column(DateTime(timezone=True)) - acknowledged_date = Column(DateTime(timezone=True)) - resolved_date = Column(DateTime(timezone=True)) - assigned_to = Column(String) - assignees = Column(ARRAY(String)) - incident_type = Column(ENUM(IncidentType), default=IncidentType.INCIDENT) - meta = Column(JSONB, default={}) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True) + provider = db.Column(db.String) + key = db.Column(db.String) + incident_number = db.Column(db.Integer) + title = db.Column(db.String) + status = db.Column(db.String) + creation_date = db.Column(db.DateTime(timezone=True)) + acknowledged_date = 
db.Column(db.DateTime(timezone=True)) + resolved_date = db.Column(db.DateTime(timezone=True)) + assigned_to = db.Column(db.String) + assignees = db.Column(ARRAY(db.String)) + incident_type = db.Column(ENUM(IncidentType), default=IncidentType.INCIDENT) + meta = db.Column(JSONB, default={}) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) def __hash__(self): return hash(self.id) -class IncidentOrgIncidentServiceMap(Base): +class IncidentOrgIncidentServiceMap(db.Model): __tablename__ = "IncidentOrgIncidentServiceMap" - incident_id = Column( - UUID(as_uuid=True), ForeignKey("Incident.id"), primary_key=True + incident_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("Incident.id"), primary_key=True ) - service_id = Column( - UUID(as_uuid=True), ForeignKey("OrgIncidentService.id"), primary_key=True + service_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("OrgIncidentService.id"), primary_key=True ) -class IncidentsBookmark(Base): +class IncidentsBookmark(db.Model): __tablename__ = "IncidentsBookmark" - id = Column(UUID(as_uuid=True), primary_key=True) - provider = Column(String) - entity_id = Column(UUID(as_uuid=True)) - entity_type = Column( + id = db.Column(UUID(as_uuid=True), primary_key=True) + provider = db.Column(db.String) + entity_id = db.Column(UUID(as_uuid=True)) + entity_type = db.Column( ENUM(IncidentBookmarkType), default=IncidentBookmarkType.SERVICE ) - bookmark = Column(DateTime(timezone=True), server_default=func.now()) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + bookmark = db.Column(db.DateTime(timezone=True), server_default=func.now()) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) diff --git a/apiserver/dora/store/models/incidents/services.py b/apiserver/dora/store/models/incidents/services.py index fec5495f8..14b487c32 100644 --- a/apiserver/dora/store/models/incidents/services.py +++ b/apiserver/dora/store/models/incidents/services.py @@ -1,51 +1,45 @@ from sqlalchemy import ( - Column, - String, - DateTime, - ForeignKey, func, - Boolean, - Integer, ) from sqlalchemy.dialects.postgresql import UUID, ARRAY, JSONB, ENUM from sqlalchemy.orm import relationship -from dora.store import Base +from dora.store import db from dora.store.models.incidents import IncidentSource -class OrgIncidentService(Base): +class OrgIncidentService(db.Model): __tablename__ = "OrgIncidentService" - id = Column(UUID(as_uuid=True), primary_key=True) - org_id = Column(UUID(as_uuid=True), ForeignKey("Organization.id")) - name = Column(String) - provider = Column(String) - key = Column(String) - auto_resolve_timeout = Column(Integer) - acknowledgement_timeout = Column(Integer) - created_by = Column(String) - provider_team_keys = Column(ARRAY(String)) - status = Column(String) - is_deleted = Column(Boolean, default=False) - meta = Column(JSONB, default={}) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + id = db.Column(UUID(as_uuid=True), primary_key=True) + org_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Organization.id")) + name = db.Column(db.String) + provider = 
db.Column(db.String) + key = db.Column(db.String) + auto_resolve_timeout = db.Column(db.Integer) + acknowledgement_timeout = db.Column(db.Integer) + created_by = db.Column(db.String) + provider_team_keys = db.Column(ARRAY(db.String)) + status = db.Column(db.String) + is_deleted = db.Column(db.Boolean, default=False) + meta = db.Column(JSONB, default={}) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - source_type = Column( + source_type = db.Column( ENUM(IncidentSource), default=IncidentSource.INCIDENT_SERVICE, nullable=False ) -class TeamIncidentService(Base): +class TeamIncidentService(db.Model): __tablename__ = "TeamIncidentService" - id = Column(UUID(as_uuid=True), primary_key=True) - team_id = Column(UUID(as_uuid=True), ForeignKey("Team.id")) - service_id = Column(UUID(as_uuid=True), ForeignKey("OrgIncidentService.id")) + id = db.Column(UUID(as_uuid=True), primary_key=True) + team_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Team.id")) + service_id = db.Column(UUID(as_uuid=True), db.ForeignKey("OrgIncidentService.id")) OrgIncidentService = relationship("OrgIncidentService", lazy="joined") - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) diff --git a/apiserver/dora/store/models/integrations/integrations.py b/apiserver/dora/store/models/integrations/integrations.py index 29f49f66b..c0010e3f4 100644 --- a/apiserver/dora/store/models/integrations/integrations.py +++ b/apiserver/dora/store/models/integrations/integrations.py @@ -1,47 +1,47 @@ from sqlalchemy import ( - Column, - String, - DateTime, - ForeignKey, func, ) from sqlalchemy.dialects.postgresql import UUID, ARRAY, JSONB -from dora.store import Base +from dora.store import db from dora.store.models.integrations import UserIdentityProvider -class Integration(Base): +class Integration(db.Model): __tablename__ = "Integration" - org_id = Column(UUID(as_uuid=True), ForeignKey("Organization.id"), primary_key=True) - name = Column(String, primary_key=True) - generated_by = Column(UUID(as_uuid=True), ForeignKey("Users.id"), nullable=True) - access_token_enc_chunks = Column(ARRAY(String)) - refresh_token_enc_chunks = Column(ARRAY(String)) - provider_meta = Column(JSONB) - scopes = Column(ARRAY(String)) - access_token_valid_till = Column(DateTime(timezone=True)) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + org_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("Organization.id"), primary_key=True + ) + name = db.Column(db.String, primary_key=True) + generated_by = db.Column( + UUID(as_uuid=True), db.ForeignKey("Users.id"), nullable=True + ) + access_token_enc_chunks = db.Column(ARRAY(db.String)) + refresh_token_enc_chunks = db.Column(ARRAY(db.String)) + provider_meta = db.Column(JSONB) + scopes = db.Column(ARRAY(db.String)) + access_token_valid_till = db.Column(db.DateTime(timezone=True)) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) 
-class UserIdentity(Base): +class UserIdentity(db.Model): __tablename__ = "UserIdentity" - user_id = Column(UUID(as_uuid=True), primary_key=True) - provider = Column(String, primary_key=True) - token = Column(String) - username = Column(String) - refresh_token = Column(String) - org_id = Column(UUID(as_uuid=True), ForeignKey("Organization.id")) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + user_id = db.Column(UUID(as_uuid=True), primary_key=True) + provider = db.Column(db.String, primary_key=True) + token = db.Column(db.String) + username = db.Column(db.String) + refresh_token = db.Column(db.String) + org_id = db.Column(UUID(as_uuid=True), db.ForeignKey("Organization.id")) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - meta = Column(JSONB) + meta = db.Column(JSONB) @property def avatar_url(self): diff --git a/apiserver/dora/store/models/settings/configuration_settings.py b/apiserver/dora/store/models/settings/configuration_settings.py index 1ad56aa30..e5613ee11 100644 --- a/apiserver/dora/store/models/settings/configuration_settings.py +++ b/apiserver/dora/store/models/settings/configuration_settings.py @@ -1,9 +1,9 @@ from enum import Enum -from sqlalchemy import Column, DateTime, ForeignKey, func, Boolean +from sqlalchemy import func from sqlalchemy.dialects.postgresql import UUID, ENUM, JSONB -from dora.store import Base +from dora.store import db from dora.store.models.settings.enums import EntityType """ @@ -18,16 +18,16 @@ class SettingType(Enum): EXCLUDED_PRS_SETTING = "EXCLUDED_PRS_SETTING" -class Settings(Base): +class Settings(db.Model): __tablename__ = "Settings" - entity_id = Column(UUID(as_uuid=True), primary_key=True, nullable=False) - entity_type = Column(ENUM(EntityType), primary_key=True, nullable=False) - setting_type = Column(ENUM(SettingType), primary_key=True, nullable=False) - updated_by = Column(UUID(as_uuid=True), ForeignKey("Users.id")) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + entity_id = db.Column(UUID(as_uuid=True), primary_key=True, nullable=False) + entity_type = db.Column(ENUM(EntityType), primary_key=True, nullable=False) + setting_type = db.Column(ENUM(SettingType), primary_key=True, nullable=False) + updated_by = db.Column(UUID(as_uuid=True), db.ForeignKey("Users.id")) + created_at = db.Column(db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column( + db.DateTime(timezone=True), server_default=func.now(), onupdate=func.now() ) - data = Column(JSONB, default="{}") - is_deleted = Column(Boolean, default=False) + data = db.Column(JSONB, default="{}") + is_deleted = db.Column(db.Boolean, default=False) diff --git a/apiserver/dora/store/repos/code.py b/apiserver/dora/store/repos/code.py index cc079a4de..6f68a7534 100644 --- a/apiserver/dora/store/repos/code.py +++ b/apiserver/dora/store/repos/code.py @@ -5,7 +5,7 @@ from sqlalchemy import or_ from sqlalchemy.orm import defer -from dora.store import rollback_on_exc, session +from dora.store import db from dora.store.models.code import ( PullRequest, PullRequestEvent, @@ -22,53 +22,47 @@ class CodeRepoService: - @rollback_on_exc def get_active_org_repos(self, org_id: str) -> List[OrgRepo]: return ( 
- session.query(OrgRepo) + db.session.query(OrgRepo) .filter(OrgRepo.org_id == org_id, OrgRepo.is_active.is_(True)) .all() ) - @rollback_on_exc def update_org_repos(self, org_repos: List[OrgRepo]): - [session.merge(org_repo) for org_repo in org_repos] - session.commit() + [db.session.merge(org_repo) for org_repo in org_repos] + db.session.commit() - @rollback_on_exc def save_pull_requests_data( self, pull_requests: List[PullRequest], pull_request_commits: List[PullRequestCommit], pull_request_events: List[PullRequestEvent], ): - [session.merge(pull_request) for pull_request in pull_requests] + [db.session.merge(pull_request) for pull_request in pull_requests] [ - session.merge(pull_request_commit) + db.session.merge(pull_request_commit) for pull_request_commit in pull_request_commits ] [ - session.merge(pull_request_event) + db.session.merge(pull_request_event) for pull_request_event in pull_request_events ] - session.commit() + db.session.commit() - @rollback_on_exc def update_prs(self, prs: List[PullRequest]): - [session.merge(pr) for pr in prs] - session.commit() + [db.session.merge(pr) for pr in prs] + db.session.commit() - @rollback_on_exc def save_revert_pr_mappings( self, revert_pr_mappings: List[PullRequestRevertPRMapping] ): - [session.merge(revert_pr_map) for revert_pr_map in revert_pr_mappings] - session.commit() + [db.session.merge(revert_pr_map) for revert_pr_map in revert_pr_mappings] + db.session.commit() - @rollback_on_exc def get_org_repo_bookmark(self, org_repo: OrgRepo, bookmark_type): return ( - session.query(Bookmark) + db.session.query(Bookmark) .filter( and_( Bookmark.repo_id == org_repo.id, @@ -78,19 +72,16 @@ def get_org_repo_bookmark(self, org_repo: OrgRepo, bookmark_type): .one_or_none() ) - @rollback_on_exc def update_org_repo_bookmark(self, bookmark: Bookmark): - session.merge(bookmark) - session.commit() + db.session.merge(bookmark) + db.session.commit() - @rollback_on_exc def get_repo_by_id(self, repo_id: str) -> Optional[OrgRepo]: - return session.query(OrgRepo).filter(OrgRepo.id == repo_id).one_or_none() + return db.session.query(OrgRepo).filter(OrgRepo.id == repo_id).one_or_none() - @rollback_on_exc def get_repo_pr_by_number(self, repo_id: str, pr_number) -> Optional[PullRequest]: return ( - session.query(PullRequest) + db.session.query(PullRequest) .options(defer(PullRequest.data)) .filter( and_( @@ -100,34 +91,31 @@ def get_repo_pr_by_number(self, repo_id: str, pr_number) -> Optional[PullRequest .one_or_none() ) - @rollback_on_exc def get_pr_events(self, pr_model: PullRequest): if not pr_model: return [] pr_events = ( - session.query(PullRequestEvent) + db.session.query(PullRequestEvent) .options(defer(PullRequestEvent.data)) .filter(PullRequestEvent.pull_request_id == pr_model.id) .all() ) return pr_events - @rollback_on_exc def get_prs_by_ids(self, pr_ids: List[str]): query = ( - session.query(PullRequest) + db.session.query(PullRequest) .options(defer(PullRequest.data)) .filter(PullRequest.id.in_(pr_ids)) ) return query.all() - @rollback_on_exc def get_prs_by_head_branch_match_strings( self, repo_ids: List[str], match_strings: List[str] ) -> List[PullRequest]: query = ( - session.query(PullRequest) + db.session.query(PullRequest) .options(defer(PullRequest.data)) .filter( and_( @@ -145,12 +133,11 @@ def get_prs_by_head_branch_match_strings( return query.all() - @rollback_on_exc def get_reverted_prs_by_numbers( self, repo_ids: List[str], numbers: List[str] ) -> List[PullRequest]: query = ( - session.query(PullRequest) + db.session.query(PullRequest) 
.options(defer(PullRequest.data)) .filter( and_( @@ -163,31 +150,27 @@ def get_reverted_prs_by_numbers( return query.all() - @rollback_on_exc def get_active_team_repos_by_team_id(self, team_id: str) -> List[TeamRepos]: return ( - session.query(TeamRepos) + db.session.query(TeamRepos) .filter(TeamRepos.team_id == team_id, TeamRepos.is_active.is_(True)) .all() ) - @rollback_on_exc def get_active_team_repos_by_team_ids(self, team_ids: List[str]) -> List[TeamRepos]: return ( - session.query(TeamRepos) + db.session.query(TeamRepos) .filter(TeamRepos.team_id.in_(team_ids), TeamRepos.is_active.is_(True)) .all() ) - @rollback_on_exc def get_active_org_repos_by_ids(self, repo_ids: List[str]) -> List[OrgRepo]: return ( - session.query(OrgRepo) + db.session.query(OrgRepo) .filter(OrgRepo.id.in_(repo_ids), OrgRepo.is_active.is_(True)) .all() ) - @rollback_on_exc def get_prs_merged_in_interval( self, repo_ids: List[str], @@ -196,7 +179,7 @@ def get_prs_merged_in_interval( base_branches: List[str] = None, has_non_null_mtd=False, ) -> List[PullRequest]: - query = session.query(PullRequest).options(defer(PullRequest.data)) + query = db.session.query(PullRequest).options(defer(PullRequest.data)) query = self._filter_prs_by_repo_ids(query, repo_ids) query = self._filter_prs_merged_in_interval(query, interval) @@ -211,19 +194,17 @@ def get_prs_merged_in_interval( return query.all() - @rollback_on_exc def get_pull_request_by_id(self, pr_id: str) -> PullRequest: return ( - session.query(PullRequest) + db.session.query(PullRequest) .options(defer(PullRequest.data)) .filter(PullRequest.id == pr_id) .one_or_none() ) - @rollback_on_exc def get_previous_pull_request(self, pull_request: PullRequest) -> PullRequest: return ( - session.query(PullRequest) + db.session.query(PullRequest) .options(defer(PullRequest.data)) .filter( PullRequest.repo_id == pull_request.repo_id, @@ -235,17 +216,15 @@ def get_previous_pull_request(self, pull_request: PullRequest) -> PullRequest: .first() ) - @rollback_on_exc def get_repos_by_ids(self, ids: List[str]) -> List[OrgRepo]: if not ids: return [] - return session.query(OrgRepo).filter(OrgRepo.id.in_(ids)).all() + return db.session.query(OrgRepo).filter(OrgRepo.id.in_(ids)).all() - @rollback_on_exc def get_team_repos(self, team_id) -> List[OrgRepo]: team_repos = ( - session.query(TeamRepos) + db.session.query(TeamRepos) .filter(and_(TeamRepos.team_id == team_id, TeamRepos.is_active == True)) .all() ) @@ -255,29 +234,26 @@ def get_team_repos(self, team_id) -> List[OrgRepo]: team_repo_ids = [tr.org_repo_id for tr in team_repos] return self.get_repos_by_ids(team_repo_ids) - @rollback_on_exc def get_merge_to_deploy_broker_bookmark( self, repo_id: str ) -> BookmarkMergeToDeployBroker: return ( - session.query(BookmarkMergeToDeployBroker) + db.session.query(BookmarkMergeToDeployBroker) .filter(BookmarkMergeToDeployBroker.repo_id == repo_id) .one_or_none() ) - @rollback_on_exc def update_merge_to_deploy_broker_bookmark( self, bookmark: BookmarkMergeToDeployBroker ): - session.merge(bookmark) - session.commit() + db.session.merge(bookmark) + db.session.commit() - @rollback_on_exc def get_prs_in_repo_merged_before_given_date_with_merge_to_deploy_as_null( self, repo_id: str, to_time: datetime ): return ( - session.query(PullRequest) + db.session.query(PullRequest) .options(defer(PullRequest.data)) .filter( PullRequest.repo_id == repo_id, @@ -288,12 +264,11 @@ def get_prs_in_repo_merged_before_given_date_with_merge_to_deploy_as_null( .all() ) - @rollback_on_exc def 
get_repo_revert_prs_mappings_updated_in_interval( self, repo_id, from_time, to_time ) -> List[PullRequestRevertPRMapping]: query = ( - session.query(PullRequestRevertPRMapping) + db.session.query(PullRequestRevertPRMapping) .join(PullRequest, PullRequest.id == PullRequestRevertPRMapping.pr_id) .filter( PullRequest.repo_id == repo_id, diff --git a/apiserver/dora/store/repos/core.py b/apiserver/dora/store/repos/core.py index 07377f47b..b479127b3 100644 --- a/apiserver/dora/store/repos/core.py +++ b/apiserver/dora/store/repos/core.py @@ -2,7 +2,7 @@ from sqlalchemy import and_ -from dora.store import session, rollback_on_exc +from dora.store import db from dora.store.models import UserIdentityProvider, Integration from dora.store.models.core import Organization, Team, Users from dora.utils.cryptography import get_crypto_service @@ -12,60 +12,55 @@ class CoreRepoService: def __init__(self): self._crypto = get_crypto_service() - @rollback_on_exc def get_org(self, org_id): return ( - session.query(Organization).filter(Organization.id == org_id).one_or_none() + db.session.query(Organization) + .filter(Organization.id == org_id) + .one_or_none() ) - @rollback_on_exc def get_org_by_name(self, org_name: str): return ( - session.query(Organization) + db.session.query(Organization) .filter(Organization.name == org_name) .one_or_none() ) - @rollback_on_exc def get_team(self, team_id: str) -> Team: return ( - session.query(Team) + db.session.query(Team) .filter(Team.id == team_id, Team.is_deleted.is_(False)) .one_or_none() ) - @rollback_on_exc def delete_team(self, team_id: str): - team = session.query(Team).filter(Team.id == team_id).one_or_none() + team = db.session.query(Team).filter(Team.id == team_id).one_or_none() if not team: return None team.is_deleted = True - session.merge(team) - session.commit() - return session.query(Team).filter(Team.id == team_id).one_or_none() + db.session.merge(team) + db.session.commit() + return db.session.query(Team).filter(Team.id == team_id).one_or_none() - @rollback_on_exc def get_user(self, user_id) -> Optional[Users]: - return session.query(Users).filter(Users.id == user_id).one_or_none() + return db.session.query(Users).filter(Users.id == user_id).one_or_none() - @rollback_on_exc def get_org_integrations_for_names(self, org_id: str, provider_names: List[str]): return ( - session.query(Integration) + db.session.query(Integration) .filter( and_(Integration.org_id == org_id, Integration.name.in_(provider_names)) ) .all() ) - @rollback_on_exc def get_access_token(self, org_id, provider: UserIdentityProvider) -> Optional[str]: user_identity: Integration = ( - session.query(Integration) + db.session.query(Integration) .filter( and_(Integration.org_id == org_id, Integration.name == provider.value) ) diff --git a/apiserver/dora/store/repos/incidents.py b/apiserver/dora/store/repos/incidents.py index 1664718b2..52967b627 100644 --- a/apiserver/dora/store/repos/incidents.py +++ b/apiserver/dora/store/repos/incidents.py @@ -2,7 +2,7 @@ from sqlalchemy import and_ -from dora.store import rollback_on_exc, session +from dora.store import db from dora.store.models.incidents import ( Incident, IncidentFilter, @@ -19,20 +19,17 @@ class IncidentsRepoService: - @rollback_on_exc def get_org_incident_services(self, org_id: str) -> List[OrgIncidentService]: return ( - session.query(OrgIncidentService) + db.session.query(OrgIncidentService) .filter(OrgIncidentService.org_id == org_id) .all() ) - @rollback_on_exc def update_org_incident_services(self, incident_services: 
List[OrgIncidentService]): - [session.merge(incident_service) for incident_service in incident_services] - session.commit() + [db.session.merge(incident_service) for incident_service in incident_services] + db.session.commit() - @rollback_on_exc def get_incidents_bookmark( self, entity_id: str, @@ -40,7 +37,7 @@ def get_incidents_bookmark( provider: IncidentProvider, ) -> IncidentsBookmark: return ( - session.query(IncidentsBookmark) + db.session.query(IncidentsBookmark) .filter( and_( IncidentsBookmark.entity_id == entity_id, @@ -51,25 +48,22 @@ def get_incidents_bookmark( .one_or_none() ) - @rollback_on_exc def save_incidents_bookmark(self, bookmark: IncidentsBookmark): - session.merge(bookmark) - session.commit() + db.session.merge(bookmark) + db.session.commit() - @rollback_on_exc def save_incidents_data( self, incidents: List[Incident], incident_org_incident_service_map: List[IncidentOrgIncidentServiceMap], ): - [session.merge(incident) for incident in incidents] + [db.session.merge(incident) for incident in incidents] [ - session.merge(incident_service_map) + db.session.merge(incident_service_map) for incident_service_map in incident_org_incident_service_map ] - session.commit() + db.session.commit() - @rollback_on_exc def get_resolved_team_incidents( self, team_id: str, interval: Interval, incident_filter: IncidentFilter = None ) -> List[Incident]: @@ -84,7 +78,6 @@ def get_resolved_team_incidents( return query.all() - @rollback_on_exc def get_team_incidents( self, team_id: str, interval: Interval, incident_filter: IncidentFilter = None ) -> List[Incident]: @@ -96,12 +89,11 @@ def get_team_incidents( return query.all() - @rollback_on_exc def get_incident_by_key_type_and_provider( self, key: str, incident_type: IncidentType, provider: IncidentProvider ) -> Incident: return ( - session.query(Incident) + db.session.query(Incident) .filter( and_( Incident.key == key, @@ -116,7 +108,7 @@ def _get_team_incidents_query( self, team_id: str, incident_filter: IncidentFilter = None ): query = ( - session.query(Incident) + db.session.query(Incident) .join( IncidentOrgIncidentServiceMap, Incident.id == IncidentOrgIncidentServiceMap.incident_id, diff --git a/apiserver/dora/store/repos/settings.py b/apiserver/dora/store/repos/settings.py index 295c8cd5b..27a6e4a60 100644 --- a/apiserver/dora/store/repos/settings.py +++ b/apiserver/dora/store/repos/settings.py @@ -2,7 +2,7 @@ from sqlalchemy import and_ -from dora.store import session, rollback_on_exc +from dora.store import db from dora.store.models import ( Settings, SettingType, @@ -13,12 +13,11 @@ class SettingsRepoService: - @rollback_on_exc def get_setting( self, entity_id: str, entity_type: EntityType, setting_type: SettingType ) -> Optional[Settings]: return ( - session.query(Settings) + db.session.query(Settings) .filter( and_( Settings.setting_type == setting_type, @@ -30,16 +29,14 @@ def get_setting( .one_or_none() ) - @rollback_on_exc def create_settings(self, settings: List[Settings]) -> List[Settings]: - [session.merge(setting) for setting in settings] - session.commit() + [db.session.merge(setting) for setting in settings] + db.session.commit() return settings - @rollback_on_exc def save_setting(self, setting: Settings) -> Optional[Settings]: - session.merge(setting) - session.commit() + db.session.merge(setting) + db.session.commit() return self.get_setting( entity_id=setting.entity_id, @@ -47,7 +44,6 @@ def save_setting(self, setting: Settings) -> Optional[Settings]: setting_type=setting.setting_type, ) - @rollback_on_exc def 
delete_setting( self, entity_id: str, @@ -62,11 +58,10 @@ def delete_setting( setting.is_deleted = True setting.updated_by = deleted_by.id setting.updated_at = time_now() - session.merge(setting) - session.commit() + db.session.merge(setting) + db.session.commit() return setting - @rollback_on_exc def get_settings( self, entity_id: str, @@ -74,7 +69,7 @@ def get_settings( setting_types: List[SettingType], ) -> Optional[Settings]: return ( - session.query(Settings) + db.session.query(Settings) .filter( and_( Settings.setting_type.in_(setting_types), diff --git a/apiserver/dora/store/repos/workflows.py b/apiserver/dora/store/repos/workflows.py index 210848def..4c7ed1146 100644 --- a/apiserver/dora/store/repos/workflows.py +++ b/apiserver/dora/store/repos/workflows.py @@ -4,7 +4,7 @@ from sqlalchemy.orm import defer from sqlalchemy import and_ -from dora.store import session, rollback_on_exc +from dora.store import db from dora.store.models.code.workflows.enums import ( RepoWorkflowRunsStatus, RepoWorkflowType, @@ -20,13 +20,12 @@ class WorkflowRepoService: - @rollback_on_exc def get_active_repo_workflows_by_repo_ids_and_providers( self, repo_ids: List[str], providers: List[RepoWorkflowProviders] ) -> List[RepoWorkflow]: return ( - session.query(RepoWorkflow) + db.session.query(RepoWorkflow) .options(defer(RepoWorkflow.meta)) .filter( RepoWorkflow.org_repo_id.in_(repo_ids), @@ -36,12 +35,11 @@ def get_active_repo_workflows_by_repo_ids_and_providers( .all() ) - @rollback_on_exc def get_repo_workflow_run_by_provider_workflow_run_id( self, repo_workflow_id: str, provider_workflow_run_id: str ) -> RepoWorkflowRuns: return ( - session.query(RepoWorkflowRuns) + db.session.query(RepoWorkflowRuns) .filter( RepoWorkflowRuns.repo_workflow_id == repo_workflow_id, RepoWorkflowRuns.provider_workflow_run_id == provider_workflow_run_id, @@ -49,32 +47,31 @@ def get_repo_workflow_run_by_provider_workflow_run_id( .one_or_none() ) - @rollback_on_exc def save_repo_workflow_runs(self, repo_workflow_runs: List[RepoWorkflowRuns]): - [session.merge(repo_workflow_run) for repo_workflow_run in repo_workflow_runs] - session.commit() + [ + db.session.merge(repo_workflow_run) + for repo_workflow_run in repo_workflow_runs + ] + db.session.commit() - @rollback_on_exc def get_repo_workflow_runs_bookmark( self, repo_workflow_id: str ) -> RepoWorkflowRunsBookmark: return ( - session.query(RepoWorkflowRunsBookmark) + db.session.query(RepoWorkflowRunsBookmark) .filter(RepoWorkflowRunsBookmark.repo_workflow_id == repo_workflow_id) .one_or_none() ) - @rollback_on_exc def update_repo_workflow_runs_bookmark(self, bookmark: RepoWorkflowRunsBookmark): - session.merge(bookmark) - session.commit() + db.session.merge(bookmark) + db.session.commit() - @rollback_on_exc def get_repo_workflow_by_repo_ids( self, repo_ids: List[str], type: RepoWorkflowType ) -> List[RepoWorkflow]: return ( - session.query(RepoWorkflow) + db.session.query(RepoWorkflow) .options(defer(RepoWorkflow.meta)) .filter( and_( @@ -86,10 +83,9 @@ def get_repo_workflow_by_repo_ids( .all() ) - @rollback_on_exc def get_repo_workflows_by_repo_id(self, repo_id: str) -> List[RepoWorkflow]: return ( - session.query(RepoWorkflow) + db.session.query(RepoWorkflow) .options(defer(RepoWorkflow.meta)) .filter( RepoWorkflow.org_repo_id == repo_id, @@ -98,12 +94,11 @@ def get_repo_workflows_by_repo_id(self, repo_id: str) -> List[RepoWorkflow]: .all() ) - @rollback_on_exc def get_successful_repo_workflows_runs_by_repo_ids( self, repo_ids: List[str], interval: Interval, workflow_filter: 
WorkflowFilter ) -> List[Tuple[RepoWorkflow, RepoWorkflowRuns]]: query = ( - session.query(RepoWorkflow, RepoWorkflowRuns) + db.session.query(RepoWorkflow, RepoWorkflowRuns) .options(defer(RepoWorkflow.meta), defer(RepoWorkflowRuns.meta)) .join( RepoWorkflowRuns, RepoWorkflow.id == RepoWorkflowRuns.repo_workflow_id @@ -122,7 +117,6 @@ def get_successful_repo_workflows_runs_by_repo_ids( return query.all() - @rollback_on_exc def get_repos_workflow_runs_by_repo_ids( self, repo_ids: List[str], @@ -130,7 +124,7 @@ def get_repos_workflow_runs_by_repo_ids( workflow_filter: WorkflowFilter = None, ) -> List[Tuple[RepoWorkflow, RepoWorkflowRuns]]: query = ( - session.query(RepoWorkflow, RepoWorkflowRuns) + db.session.query(RepoWorkflow, RepoWorkflowRuns) .options(defer(RepoWorkflow.meta), defer(RepoWorkflowRuns.meta)) .join( RepoWorkflowRuns, RepoWorkflow.id == RepoWorkflowRuns.repo_workflow_id @@ -147,24 +141,22 @@ def get_repos_workflow_runs_by_repo_ids( return query.all() - @rollback_on_exc def get_repo_workflow_run_by_id( self, repo_workflow_run_id: str ) -> Tuple[RepoWorkflow, RepoWorkflowRuns]: return ( - session.query(RepoWorkflow, RepoWorkflowRuns) + db.session.query(RepoWorkflow, RepoWorkflowRuns) .options(defer(RepoWorkflow.meta), defer(RepoWorkflowRuns.meta)) .join(RepoWorkflow, RepoWorkflow.id == RepoWorkflowRuns.repo_workflow_id) .filter(RepoWorkflowRuns.id == repo_workflow_run_id) .one_or_none() ) - @rollback_on_exc def get_previous_workflow_run( self, workflow_run: RepoWorkflowRuns ) -> Tuple[RepoWorkflow, RepoWorkflowRuns]: return ( - session.query(RepoWorkflow, RepoWorkflowRuns) + db.session.query(RepoWorkflow, RepoWorkflowRuns) .options(defer(RepoWorkflow.meta), defer(RepoWorkflowRuns.meta)) .join(RepoWorkflow, RepoWorkflow.id == RepoWorkflowRuns.repo_workflow_id) .filter( @@ -176,12 +168,11 @@ def get_previous_workflow_run( .first() ) - @rollback_on_exc def get_repo_workflow_runs_conducted_after_time( self, repo_id: str, from_time: datetime = None, limit_value: int = 500 ): query = ( - session.query(RepoWorkflowRuns) + db.session.query(RepoWorkflowRuns) .options(defer(RepoWorkflowRuns.meta)) .join(RepoWorkflow, RepoWorkflow.id == RepoWorkflowRuns.repo_workflow_id) .filter( diff --git a/apiserver/requirements.txt b/apiserver/requirements.txt index 4e3a3865a..05076fec4 100644 --- a/apiserver/requirements.txt +++ b/apiserver/requirements.txt @@ -11,3 +11,4 @@ python-redis-lock==4.0.0 psycopg2==2.9.3 python-dotenv==1.0.1 gunicorn==21.0.1 +Flask-SQLAlchemy==3.1.1 diff --git a/web-server/pages/api/internal/team/[team_id]/dora_metrics.ts b/web-server/pages/api/internal/team/[team_id]/dora_metrics.ts index 09d3f52ef..bd6ca802f 100644 --- a/web-server/pages/api/internal/team/[team_id]/dora_metrics.ts +++ b/web-server/pages/api/internal/team/[team_id]/dora_metrics.ts @@ -151,7 +151,6 @@ endpoint.handle.GET(getSchema, async (req, res) => { ) ]); - console.log('🚀 ~ endpoint.handle.GET ~ leadTimeResponse:', leadTimeResponse); return res.send({ lead_time_stats: leadTimeResponse.lead_time_stats, lead_time_trends: leadTimeResponse.lead_time_trends,
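A minimal usage sketch of the wiring this changeset introduces, assuming the DB_* environment variables read by configure_db_with_app are set. The query helper at the end mirrors CodeRepoService.get_repo_by_id from the diff; the snippet is illustrative only and not part of the changeset, and the import path for initialize_database is inferred from the file location apiserver/dora/store/initialise_db.py.

# Illustrative sketch, not part of the diff: consuming the new Flask-SQLAlchemy wiring.
# Assumes DB_HOST/DB_PORT/DB_USER/DB_PASS/DB_NAME are set in the environment and that
# initialize_database is importable from dora.store.initialise_db (inferred from the file path).
from flask import Flask

from dora.store import configure_db_with_app, db
from dora.store.initialise_db import initialize_database
from dora.store.models.code import OrgRepo

app = Flask(__name__)
configure_db_with_app(app)  # binds the module-level `db` object and pool settings to this app
initialize_database(app)    # seeds the "default" Organization row inside an app context


def get_repo_by_id(repo_id: str):
    # Flask-SQLAlchemy's db.session needs an active application context outside a request.
    with app.app_context():
        return db.session.query(OrgRepo).filter(OrgRepo.id == repo_id).one_or_none()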