diff --git a/airflow/api_connexion/endpoints/config_endpoint.py b/airflow/api_connexion/endpoints/config_endpoint.py index 272d5f7342f56..a9353234f0b4e 100644 --- a/airflow/api_connexion/endpoints/config_endpoint.py +++ b/airflow/api_connexion/endpoints/config_endpoint.py @@ -31,7 +31,7 @@ def _conf_dict_to_config(conf_dict: dict) -> Config: - """Convert config dict to a Config object""" + """Convert config dict to a Config object.""" config = Config( sections=[ ConfigSection( @@ -44,12 +44,12 @@ def _conf_dict_to_config(conf_dict: dict) -> Config: def _option_to_text(config_option: ConfigOption) -> str: - """Convert a single config option to text""" + """Convert a single config option to text.""" return f"{config_option.key} = {config_option.value}" def _section_to_text(config_section: ConfigSection) -> str: - """Convert a single config section to text""" + """Convert a single config section to text.""" return ( f"[{config_section.name}]{LINE_SEP}" f"{LINE_SEP.join(_option_to_text(option) for option in config_section.options)}{LINE_SEP}" @@ -57,12 +57,12 @@ def _section_to_text(config_section: ConfigSection) -> str: def _config_to_text(config: Config) -> str: - """Convert the entire config to text""" + """Convert the entire config to text.""" return LINE_SEP.join(_section_to_text(s) for s in config.sections) def _config_to_json(config: Config) -> str: - """Convert a Config object to a JSON formatted string""" + """Convert a Config object to a JSON formatted string.""" return json.dumps(config_schema.dump(config), indent=4) diff --git a/airflow/api_connexion/endpoints/connection_endpoint.py b/airflow/api_connexion/endpoints/connection_endpoint.py index 770880c230b23..a86511ead24c6 100644 --- a/airflow/api_connexion/endpoints/connection_endpoint.py +++ b/airflow/api_connexion/endpoints/connection_endpoint.py @@ -45,7 +45,7 @@ @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_CONNECTION)]) @provide_session def delete_connection(*, connection_id: str, session: Session = NEW_SESSION) -> APIResponse: - """Delete a connection entry""" + """Delete a connection entry.""" connection = session.query(Connection).filter_by(conn_id=connection_id).one_or_none() if connection is None: raise NotFound( @@ -59,7 +59,7 @@ def delete_connection(*, connection_id: str, session: Session = NEW_SESSION) -> @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)]) @provide_session def get_connection(*, connection_id: str, session: Session = NEW_SESSION) -> APIResponse: - """Get a connection entry""" + """Get a connection entry.""" connection = session.query(Connection).filter(Connection.conn_id == connection_id).one_or_none() if connection is None: raise NotFound( @@ -79,7 +79,7 @@ def get_connections( order_by: str = "id", session: Session = NEW_SESSION, ) -> APIResponse: - """Get all connection entries""" + """Get all connection entries.""" to_replace = {"connection_id": "conn_id"} allowed_filter_attrs = ["connection_id", "conn_type", "description", "host", "port", "id"] @@ -100,7 +100,7 @@ def patch_connection( update_mask: UpdateMask = None, session: Session = NEW_SESSION, ) -> APIResponse: - """Update a connection entry""" + """Update a connection entry.""" try: data = connection_schema.load(request.json, partial=True) except ValidationError as err: @@ -134,7 +134,7 @@ def patch_connection( @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION)]) @provide_session def post_connection(*, session: Session = 
NEW_SESSION) -> APIResponse: - """Create connection entry""" + """Create connection entry.""" body = request.json try: data = connection_schema.load(body) @@ -154,7 +154,9 @@ def post_connection(*, session: Session = NEW_SESSION) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION)]) def test_connection() -> APIResponse: """ - To test a connection, this method first creates an in-memory dummy conn_id & exports that to an + Test an API connection. + + This method first creates an in-memory dummy conn_id & exports that to an env var, as some hook classes tries to find out the conn from their __init__ method & errors out if not found. It also deletes the conn id env variable after the test. """ diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index f72d69590dae6..be66dc814e95b 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -102,7 +102,7 @@ def get_dags( @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG)]) @provide_session def patch_dag(*, dag_id: str, update_mask: UpdateMask = None, session: Session = NEW_SESSION) -> APIResponse: - """Update the specific DAG""" + """Update the specific DAG.""" try: patch_body = dag_schema.load(request.json, session=session) except ValidationError as err: diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 1f2a5a8959ca2..6678b76903bc8 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -68,7 +68,7 @@ ) @provide_session def delete_dag_run(*, dag_id: str, dag_run_id: str, session: Session = NEW_SESSION) -> APIResponse: - """Delete a DAG Run""" + """Delete a DAG Run.""" if session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id).delete() == 0: raise NotFound(detail=f"DAGRun with DAG ID: '{dag_id}' and DagRun ID: '{dag_run_id}' not found") return NoContent, HTTPStatus.NO_CONTENT @@ -237,7 +237,7 @@ def get_dag_runs( ) @provide_session def get_dag_runs_batch(*, session: Session = NEW_SESSION) -> APIResponse: - """Get list of DAG Runs""" + """Get list of DAG Runs.""" body = get_json_request_dict() try: data = dagruns_batch_form_schema.load(body) diff --git a/airflow/api_connexion/endpoints/dag_source_endpoint.py b/airflow/api_connexion/endpoints/dag_source_endpoint.py index ff5f627248516..42ccd4e5d8671 100644 --- a/airflow/api_connexion/endpoints/dag_source_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py @@ -30,7 +30,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE)]) def get_dag_source(*, file_token: str) -> Response: - """Get source code using file token""" + """Get source code using file token.""" secret_key = current_app.config["SECRET_KEY"] auth_s = URLSafeSerializer(secret_key) try: diff --git a/airflow/api_connexion/endpoints/dataset_endpoint.py b/airflow/api_connexion/endpoints/dataset_endpoint.py index 8f224d696b701..42e8bb3c36c33 100644 --- a/airflow/api_connexion/endpoints/dataset_endpoint.py +++ b/airflow/api_connexion/endpoints/dataset_endpoint.py @@ -38,7 +38,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DATASET)]) @provide_session def get_dataset(uri: str, session: Session = NEW_SESSION) -> APIResponse: - """Get a Dataset""" + """Get a Dataset.""" dataset = ( 
session.query(DatasetModel) .filter(DatasetModel.uri == uri) @@ -64,7 +64,7 @@ def get_datasets( order_by: str = "id", session: Session = NEW_SESSION, ) -> APIResponse: - """Get datasets""" + """Get datasets.""" allowed_attrs = ["id", "uri", "created_at", "updated_at"] total_entries = session.query(func.count(DatasetModel.id)).scalar() @@ -96,7 +96,7 @@ def get_dataset_events( source_map_index: int | None = None, session: Session = NEW_SESSION, ) -> APIResponse: - """Get dataset events""" + """Get dataset events.""" allowed_attrs = ["source_dag_id", "source_task_id", "source_run_id", "source_map_index", "timestamp"] query = session.query(DatasetEvent) diff --git a/airflow/api_connexion/endpoints/event_log_endpoint.py b/airflow/api_connexion/endpoints/event_log_endpoint.py index 141bf4d005e58..94fa73a431597 100644 --- a/airflow/api_connexion/endpoints/event_log_endpoint.py +++ b/airflow/api_connexion/endpoints/event_log_endpoint.py @@ -36,7 +36,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_AUDIT_LOG)]) @provide_session def get_event_log(*, event_log_id: int, session: Session = NEW_SESSION) -> APIResponse: - """Get a log entry""" + """Get a log entry.""" event_log = session.query(Log).get(event_log_id) if event_log is None: raise NotFound("Event Log not found") @@ -53,7 +53,7 @@ def get_event_logs( order_by: str = "event_log_id", session: Session = NEW_SESSION, ) -> APIResponse: - """Get all log entries from event log""" + """Get all log entries from event log.""" to_replace = {"event_log_id": "id", "when": "dttm"} allowed_filter_attrs = [ "event_log_id", diff --git a/airflow/api_connexion/endpoints/extra_link_endpoint.py b/airflow/api_connexion/endpoints/extra_link_endpoint.py index 0bc5b2183608a..2b12667e7cdfa 100644 --- a/airflow/api_connexion/endpoints/extra_link_endpoint.py +++ b/airflow/api_connexion/endpoints/extra_link_endpoint.py @@ -44,7 +44,7 @@ def get_extra_links( task_id: str, session: Session = NEW_SESSION, ) -> APIResponse: - """Get extra links for task instance""" + """Get extra links for task instance.""" from airflow.models.taskinstance import TaskInstance dagbag: DagBag = get_airflow_app().dag_bag diff --git a/airflow/api_connexion/endpoints/health_endpoint.py b/airflow/api_connexion/endpoints/health_endpoint.py index a60da3c4bc558..f833a5d72815b 100644 --- a/airflow/api_connexion/endpoints/health_endpoint.py +++ b/airflow/api_connexion/endpoints/health_endpoint.py @@ -25,7 +25,7 @@ def get_health() -> APIResponse: - """Return the health of the airflow scheduler and metadatabase""" + """Return the health of the airflow scheduler and metadatabase.""" metadatabase_status = HEALTHY latest_scheduler_heartbeat = None scheduler_status = UNHEALTHY diff --git a/airflow/api_connexion/endpoints/import_error_endpoint.py b/airflow/api_connexion/endpoints/import_error_endpoint.py index 03f47526bc3fe..f5798fe99e8c4 100644 --- a/airflow/api_connexion/endpoints/import_error_endpoint.py +++ b/airflow/api_connexion/endpoints/import_error_endpoint.py @@ -36,7 +36,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR)]) @provide_session def get_import_error(*, import_error_id: int, session: Session = NEW_SESSION) -> APIResponse: - """Get an import error""" + """Get an import error.""" error = session.query(ImportErrorModel).get(import_error_id) if error is None: @@ -57,7 +57,7 @@ def get_import_errors( order_by: str = "import_error_id", session: Session = NEW_SESSION, ) -> APIResponse: - """Get all import 
errors""" + """Get all import errors.""" to_replace = {"import_error_id": "id"} allowed_filter_attrs = ["import_error_id", "timestamp", "filename"] total_entries = session.query(func.count(ImportErrorModel.id)).scalar() diff --git a/airflow/api_connexion/endpoints/log_endpoint.py b/airflow/api_connexion/endpoints/log_endpoint.py index 145d716754be0..388b164727e90 100644 --- a/airflow/api_connexion/endpoints/log_endpoint.py +++ b/airflow/api_connexion/endpoints/log_endpoint.py @@ -54,7 +54,7 @@ def get_log( token: str | None = None, session: Session = NEW_SESSION, ) -> APIResponse: - """Get logs for specific task instance""" + """Get logs for specific task instance.""" key = get_airflow_app().config["SECRET_KEY"] if not token: metadata = {} diff --git a/airflow/api_connexion/endpoints/plugin_endpoint.py b/airflow/api_connexion/endpoints/plugin_endpoint.py index b6efb774bb38d..a2febda42c9b8 100644 --- a/airflow/api_connexion/endpoints/plugin_endpoint.py +++ b/airflow/api_connexion/endpoints/plugin_endpoint.py @@ -27,7 +27,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN)]) @format_parameters({"limit": check_limit}) def get_plugins(*, limit: int, offset: int = 0) -> APIResponse: - """Get plugins endpoint""" + """Get plugins endpoint.""" plugins_info = get_plugin_info() collection = PluginCollection(plugins=plugins_info[offset:][:limit], total_entries=len(plugins_info)) return plugin_collection_schema.dump(collection) diff --git a/airflow/api_connexion/endpoints/pool_endpoint.py b/airflow/api_connexion/endpoints/pool_endpoint.py index de838b9c44945..e6f7903e0ca4d 100644 --- a/airflow/api_connexion/endpoints/pool_endpoint.py +++ b/airflow/api_connexion/endpoints/pool_endpoint.py @@ -38,7 +38,7 @@ @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL)]) @provide_session def delete_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIResponse: - """Delete a pool""" + """Delete a pool.""" if pool_name == "default_pool": raise BadRequest(detail="Default Pool can't be deleted") affected_count = session.query(Pool).filter(Pool.pool == pool_name).delete() @@ -50,7 +50,7 @@ def delete_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIRespons @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL)]) @provide_session def get_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIResponse: - """Get a pool""" + """Get a pool.""" obj = session.query(Pool).filter(Pool.pool == pool_name).one_or_none() if obj is None: raise NotFound(detail=f"Pool with name:'{pool_name}' not found") @@ -67,7 +67,7 @@ def get_pools( offset: int | None = None, session: Session = NEW_SESSION, ) -> APIResponse: - """Get all pools""" + """Get all pools.""" to_replace = {"name": "pool"} allowed_filter_attrs = ["name", "slots", "id"] total_entries = session.query(func.count(Pool.id)).scalar() @@ -85,7 +85,7 @@ def patch_pool( update_mask: UpdateMask = None, session: Session = NEW_SESSION, ) -> APIResponse: - """Update a pool""" + """Update a pool.""" request_dict = get_json_request_dict() # Only slots can be modified in 'default_pool' try: @@ -136,7 +136,7 @@ def patch_pool( @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_POOL)]) @provide_session def post_pool(*, session: Session = NEW_SESSION) -> APIResponse: - """Create a pool""" + """Create a pool.""" required_fields = {"name", "slots"} # Pool would require both fields in the post request fields_diff = required_fields - 
set(get_json_request_dict().keys()) if fields_diff: diff --git a/airflow/api_connexion/endpoints/provider_endpoint.py b/airflow/api_connexion/endpoints/provider_endpoint.py index ab81c16e46011..c829d9c968d61 100644 --- a/airflow/api_connexion/endpoints/provider_endpoint.py +++ b/airflow/api_connexion/endpoints/provider_endpoint.py @@ -43,7 +43,7 @@ def _provider_mapper(provider: ProviderInfo) -> Provider: @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_PROVIDER)]) def get_providers() -> APIResponse: - """Get providers""" + """Get providers.""" providers = [_provider_mapper(d) for d in ProvidersManager().providers.values()] total_entries = len(providers) return provider_collection_schema.dump( diff --git a/airflow/api_connexion/endpoints/request_dict.py b/airflow/api_connexion/endpoints/request_dict.py index 0e3a85402e7c9..b07e06c0b63f8 100644 --- a/airflow/api_connexion/endpoints/request_dict.py +++ b/airflow/api_connexion/endpoints/request_dict.py @@ -20,6 +20,7 @@ def get_json_request_dict() -> Mapping[str, Any]: + """Cast request dictionary to JSON.""" from flask import request return cast(Mapping[str, Any], request.get_json()) diff --git a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py index fc7946ea306b0..4ed40caae508c 100644 --- a/airflow/api_connexion/endpoints/role_and_permission_endpoint.py +++ b/airflow/api_connexion/endpoints/role_and_permission_endpoint.py @@ -42,7 +42,7 @@ def _check_action_and_resource(sm: AirflowSecurityManager, perms: list[tuple[str, str]]) -> None: """ - Checks if the action or resource exists and raise 400 if not + Checks if the action or resource exists and otherwise raise 400. This function is intended for use in the REST API because it raise 400 """ @@ -55,7 +55,7 @@ def _check_action_and_resource(sm: AirflowSecurityManager, perms: list[tuple[str @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_ROLE)]) def get_role(*, role_name: str) -> APIResponse: - """Get role""" + """Get role.""" ab_security_manager = get_airflow_app().appbuilder.sm role = ab_security_manager.find_role(name=role_name) if not role: @@ -66,7 +66,7 @@ def get_role(*, role_name: str) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_ROLE)]) @format_parameters({"limit": check_limit}) def get_roles(*, order_by: str = "name", limit: int, offset: int | None = None) -> APIResponse: - """Get roles""" + """Get roles.""" appbuilder = get_airflow_app().appbuilder session = appbuilder.get_session total_entries = session.query(func.count(Role.id)).scalar() @@ -90,7 +90,7 @@ def get_roles(*, order_by: str = "name", limit: int, offset: int | None = None) @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_ACTION)]) @format_parameters({"limit": check_limit}) def get_permissions(*, limit: int, offset: int | None = None) -> APIResponse: - """Get permissions""" + """Get permissions.""" session = get_airflow_app().appbuilder.get_session total_entries = session.query(func.count(Action.id)).scalar() query = session.query(Action) @@ -100,7 +100,7 @@ def get_permissions(*, limit: int, offset: int | None = None) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_ROLE)]) def delete_role(*, role_name: str) -> APIResponse: - """Delete a role""" + """Delete a role.""" ab_security_manager = get_airflow_app().appbuilder.sm role = 
ab_security_manager.find_role(name=role_name) if not role: @@ -111,7 +111,7 @@ def delete_role(*, role_name: str) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_ROLE)]) def patch_role(*, role_name: str, update_mask: UpdateMask = None) -> APIResponse: - """Update a role""" + """Update a role.""" appbuilder = get_airflow_app().appbuilder security_manager = appbuilder.sm body = request.json @@ -145,7 +145,7 @@ def patch_role(*, role_name: str, update_mask: UpdateMask = None) -> APIResponse @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_ROLE)]) def post_role() -> APIResponse: - """Create a new role""" + """Create a new role.""" appbuilder = get_airflow_app().appbuilder security_manager = appbuilder.sm body = request.json diff --git a/airflow/api_connexion/endpoints/task_endpoint.py b/airflow/api_connexion/endpoints/task_endpoint.py index 61e9f05bb199d..23c2b32487b31 100644 --- a/airflow/api_connexion/endpoints/task_endpoint.py +++ b/airflow/api_connexion/endpoints/task_endpoint.py @@ -54,7 +54,7 @@ def get_task(*, dag_id: str, task_id: str) -> APIResponse: ], ) def get_tasks(*, dag_id: str, order_by: str = "task_id") -> APIResponse: - """Get tasks for DAG""" + """Get tasks for DAG.""" dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) if not dag: raise NotFound("DAG not found") diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py index ce2395706da1b..74f8d2a015d14 100644 --- a/airflow/api_connexion/endpoints/task_instance_endpoint.py +++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py @@ -69,7 +69,7 @@ def get_task_instance( task_id: str, session: Session = NEW_SESSION, ) -> APIResponse: - """Get task instance""" + """Get task instance.""" query = ( session.query(TI) .filter(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id) @@ -118,7 +118,7 @@ def get_mapped_task_instance( map_index: int, session: Session = NEW_SESSION, ) -> APIResponse: - """Get task instance""" + """Get task instance.""" query = ( session.query(TI) .filter( diff --git a/airflow/api_connexion/endpoints/user_endpoint.py b/airflow/api_connexion/endpoints/user_endpoint.py index e6c670d12d298..506e11e00612c 100644 --- a/airflow/api_connexion/endpoints/user_endpoint.py +++ b/airflow/api_connexion/endpoints/user_endpoint.py @@ -41,7 +41,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_USER)]) def get_user(*, username: str) -> APIResponse: - """Get a user""" + """Get a user.""" ab_security_manager = get_airflow_app().appbuilder.sm user = ab_security_manager.find_user(username=username) if not user: @@ -52,7 +52,7 @@ def get_user(*, username: str) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_USER)]) @format_parameters({"limit": check_limit}) def get_users(*, limit: int, order_by: str = "id", offset: str | None = None) -> APIResponse: - """Get users""" + """Get users.""" appbuilder = get_airflow_app().appbuilder session = appbuilder.get_session total_entries = session.query(func.count(User.id)).scalar() @@ -83,7 +83,7 @@ def get_users(*, limit: int, order_by: str = "id", offset: str | None = None) -> @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_USER)]) def post_user() -> APIResponse: - """Create a new user""" + """Create a new user.""" try: data = user_schema.load(request.json) except ValidationError as e: @@ -126,7 +126,7 @@ def 
post_user() -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_USER)]) def patch_user(*, username: str, update_mask: UpdateMask = None) -> APIResponse: - """Update a user""" + """Update a user.""" try: data = user_schema.load(request.json) except ValidationError as e: @@ -195,7 +195,7 @@ def patch_user(*, username: str, update_mask: UpdateMask = None) -> APIResponse: @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_USER)]) def delete_user(*, username: str) -> APIResponse: - """Delete a user""" + """Delete a user.""" security_manager = get_airflow_app().appbuilder.sm user = security_manager.find_user(username=username) diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 88357c84b6bce..d22d14996b2b8 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -36,7 +36,7 @@ @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_VARIABLE)]) def delete_variable(*, variable_key: str) -> Response: - """Delete variable""" + """Delete variable.""" if Variable.delete(variable_key) == 0: raise NotFound("Variable not found") return Response(status=HTTPStatus.NO_CONTENT) @@ -45,7 +45,7 @@ def delete_variable(*, variable_key: str) -> Response: @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE)]) @provide_session def get_variable(*, variable_key: str, session: Session = NEW_SESSION) -> Response: - """Get a variable by key""" + """Get a variable by key.""" var = session.query(Variable).filter(Variable.key == variable_key) if not var.count(): raise NotFound("Variable not found") @@ -62,7 +62,7 @@ def get_variables( offset: int | None = None, session: Session = NEW_SESSION, ) -> Response: - """Get all variable values""" + """Get all variable values.""" total_entries = session.query(func.count(Variable.id)).scalar() to_replace = {"value": "val"} allowed_filter_attrs = ["value", "key", "id"] @@ -79,7 +79,7 @@ def get_variables( @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_VARIABLE)]) def patch_variable(*, variable_key: str, update_mask: UpdateMask = None) -> Response: - """Update a variable by key""" + """Update a variable by key.""" try: data = variable_schema.load(get_json_request_dict()) except ValidationError as err: @@ -100,7 +100,7 @@ def patch_variable(*, variable_key: str, update_mask: UpdateMask = None) -> Resp @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)]) def post_variables() -> Response: - """Create a variable""" + """Create a variable.""" try: data = variable_schema.load(get_json_request_dict()) diff --git a/airflow/api_connexion/endpoints/version_endpoint.py b/airflow/api_connexion/endpoints/version_endpoint.py index 15edd156703a1..79b4d2f1e1719 100644 --- a/airflow/api_connexion/endpoints/version_endpoint.py +++ b/airflow/api_connexion/endpoints/version_endpoint.py @@ -25,14 +25,14 @@ class VersionInfo(NamedTuple): - """Version information""" + """Version information.""" version: str git_version: str | None def get_version() -> APIResponse: - """Get version information""" + """Get version information.""" airflow_version = airflow.__version__ git_version = get_airflow_git_version() diff --git a/airflow/api_connexion/endpoints/xcom_endpoint.py b/airflow/api_connexion/endpoints/xcom_endpoint.py index bc71626fda401..2ab5ec26f5a2a 100644 --- 
a/airflow/api_connexion/endpoints/xcom_endpoint.py +++ b/airflow/api_connexion/endpoints/xcom_endpoint.py @@ -52,7 +52,7 @@ def get_xcom_entries( offset: int | None = None, session: Session = NEW_SESSION, ) -> APIResponse: - """Get all XCom values""" + """Get all XCom values.""" query = session.query(XCom) if dag_id == "~": appbuilder = get_airflow_app().appbuilder @@ -91,7 +91,7 @@ def get_xcom_entry( deserialize: bool = False, session: Session = NEW_SESSION, ) -> APIResponse: - """Get an XCom entry""" + """Get an XCom entry.""" if deserialize: query = session.query(XCom, XCom.value) else: diff --git a/airflow/api_connexion/exceptions.py b/airflow/api_connexion/exceptions.py index 631a4a0df391c..11468e1506feb 100644 --- a/airflow/api_connexion/exceptions.py +++ b/airflow/api_connexion/exceptions.py @@ -73,7 +73,7 @@ def common_error_handler(exception: BaseException) -> flask.Response: class NotFound(ProblemException): - """Raise when the object cannot be found""" + """Raise when the object cannot be found.""" def __init__( self, @@ -93,7 +93,7 @@ def __init__( class BadRequest(ProblemException): - """Raise when the server processes a bad request""" + """Raise when the server processes a bad request.""" def __init__( self, @@ -113,7 +113,7 @@ def __init__( class Unauthenticated(ProblemException): - """Raise when the user is not authenticated""" + """Raise when the user is not authenticated.""" def __init__( self, @@ -133,7 +133,7 @@ def __init__( class PermissionDenied(ProblemException): - """Raise when the user does not have the required permissions""" + """Raise when the user does not have the required permissions.""" def __init__( self, @@ -153,7 +153,7 @@ def __init__( class AlreadyExists(ProblemException): - """Raise when the object already exists""" + """Raise when the object already exists.""" def __init__( self, @@ -173,7 +173,7 @@ def __init__( class Unknown(ProblemException): - """Returns a response body and status code for HTTP 500 exception""" + """Returns a response body and status code for HTTP 500 exception.""" def __init__( self, diff --git a/airflow/api_connexion/parameters.py b/airflow/api_connexion/parameters.py index 8320c18852fb4..8064912d921fa 100644 --- a/airflow/api_connexion/parameters.py +++ b/airflow/api_connexion/parameters.py @@ -30,13 +30,15 @@ def validate_istimezone(value: datetime) -> None: - """Validates that a datetime is not naive""" + """Validates that a datetime is not naive.""" if not value.tzinfo: raise BadRequest("Invalid datetime format", detail="Naive datetime is disallowed") def format_datetime(value: str) -> datetime: """ + Format datetime objects. + Datetime format parser for args since connexion doesn't parse datetimes https://github.com/zalando/connexion/issues/476 @@ -53,6 +55,8 @@ def format_datetime(value: str) -> datetime: def check_limit(value: int) -> int: """ + Check the limit does not exceed configured value. 
+ This checks the limit passed to view and raises BadRequest if limit exceed user configured value """ @@ -99,7 +103,7 @@ def apply_sorting( to_replace: dict[str, str] | None = None, allowed_attrs: Container[str] | None = None, ) -> Query: - """Apply sorting to query""" + """Apply sorting to query.""" lstriped_orderby = order_by.lstrip("-") if allowed_attrs and lstriped_orderby not in allowed_attrs: raise BadRequest( diff --git a/airflow/api_connexion/schemas/common_schema.py b/airflow/api_connexion/schemas/common_schema.py index bd6b5ccb74d75..cf510137621b6 100644 --- a/airflow/api_connexion/schemas/common_schema.py +++ b/airflow/api_connexion/schemas/common_schema.py @@ -32,13 +32,13 @@ class CronExpression(typing.NamedTuple): - """Cron expression schema""" + """Cron expression schema.""" value: str class TimeDeltaSchema(Schema): - """Time delta schema""" + """Time delta schema.""" objectType = fields.Constant("TimeDelta", data_key="__type") days = fields.Integer() @@ -47,14 +47,14 @@ class TimeDeltaSchema(Schema): @marshmallow.post_load def make_time_delta(self, data, **kwargs): - """Create time delta based on data""" + """Create time delta based on data.""" if "objectType" in data: del data["objectType"] return datetime.timedelta(**data) class RelativeDeltaSchema(Schema): - """Relative delta schema""" + """Relative delta schema.""" objectType = fields.Constant("RelativeDelta", data_key="__type") years = fields.Integer() @@ -75,7 +75,7 @@ class RelativeDeltaSchema(Schema): @marshmallow.post_load def make_relative_delta(self, data, **kwargs): - """Create relative delta based on data""" + """Create relative delta based on data.""" if "objectType" in data: del data["objectType"] @@ -83,14 +83,14 @@ def make_relative_delta(self, data, **kwargs): class CronExpressionSchema(Schema): - """Cron expression schema""" + """Cron expression schema.""" objectType = fields.Constant("CronExpression", data_key="__type") value = fields.String(required=True) @marshmallow.post_load def make_cron_expression(self, data, **kwargs): - """Create cron expression based on data""" + """Create cron expression based on data.""" return CronExpression(data["value"]) @@ -119,7 +119,7 @@ def _dump(self, obj, update_fields=True, **kwargs): return super()._dump(obj, update_fields=update_fields, **kwargs) def get_obj_type(self, obj): - """Select schema based on object type""" + """Select schema based on object type.""" if isinstance(obj, datetime.timedelta): return "TimeDelta" elif isinstance(obj, relativedelta.relativedelta): @@ -131,7 +131,7 @@ def get_obj_type(self, obj): class ColorField(fields.String): - """Schema for color property""" + """Schema for color property.""" def __init__(self, **metadata): super().__init__(**metadata) @@ -139,7 +139,7 @@ def __init__(self, **metadata): class WeightRuleField(fields.String): - """Schema for WeightRule""" + """Schema for WeightRule.""" def __init__(self, **metadata): super().__init__(**metadata) @@ -147,7 +147,7 @@ def __init__(self, **metadata): class TimezoneField(fields.String): - """Schema for timezone""" + """Schema for timezone.""" class ClassReferenceSchema(Schema): diff --git a/airflow/api_connexion/schemas/config_schema.py b/airflow/api_connexion/schemas/config_schema.py index 938b5cd784677..28095c177a1df 100644 --- a/airflow/api_connexion/schemas/config_schema.py +++ b/airflow/api_connexion/schemas/config_schema.py @@ -22,41 +22,41 @@ class ConfigOptionSchema(Schema): - """Config Option Schema""" + """Config Option Schema.""" key = fields.String(required=True) value 
= fields.String(required=True) class ConfigOption(NamedTuple): - """Config option""" + """Config option.""" key: str value: str class ConfigSectionSchema(Schema): - """Config Section Schema""" + """Config Section Schema.""" name = fields.String(required=True) options = fields.List(fields.Nested(ConfigOptionSchema)) class ConfigSection(NamedTuple): - """List of config options within a section""" + """List of config options within a section.""" name: str options: list[ConfigOption] class ConfigSchema(Schema): - """Config Schema""" + """Config Schema.""" sections = fields.List(fields.Nested(ConfigSectionSchema)) class Config(NamedTuple): - """List of config sections with their options""" + """List of config sections with their options.""" sections: list[ConfigSection] diff --git a/airflow/api_connexion/schemas/connection_schema.py b/airflow/api_connexion/schemas/connection_schema.py index 0ec7ab314bcb7..4288ce079c554 100644 --- a/airflow/api_connexion/schemas/connection_schema.py +++ b/airflow/api_connexion/schemas/connection_schema.py @@ -27,10 +27,10 @@ class ConnectionCollectionItemSchema(SQLAlchemySchema): - """Schema for a connection item""" + """Schema for a connection item.""" class Meta: - """Meta""" + """Meta.""" model = Connection @@ -44,7 +44,7 @@ class Meta: class ConnectionSchema(ConnectionCollectionItemSchema): - """Connection schema""" + """Connection schema.""" password = auto_field(load_only=True) extra = fields.Method("serialize_extra", deserialize="deserialize_extra", allow_none=True) @@ -68,21 +68,21 @@ def deserialize_extra(value): # an explicit deserialize method is required for class ConnectionCollection(NamedTuple): - """List of Connections with meta""" + """List of Connections with meta.""" connections: list[Connection] total_entries: int class ConnectionCollectionSchema(Schema): - """Connection Collection Schema""" + """Connection Collection Schema.""" connections = fields.List(fields.Nested(ConnectionCollectionItemSchema)) total_entries = fields.Int() class ConnectionTestSchema(Schema): - """connection Test Schema""" + """connection Test Schema.""" status = fields.Boolean(required=True) message = fields.String(required=True) diff --git a/airflow/api_connexion/schemas/dag_run_schema.py b/airflow/api_connexion/schemas/dag_run_schema.py index f4f2fc1832716..1a63a0eba3385 100644 --- a/airflow/api_connexion/schemas/dag_run_schema.py +++ b/airflow/api_connexion/schemas/dag_run_schema.py @@ -36,7 +36,7 @@ class ConfObject(fields.Field): - """The conf field""" + """The conf field.""" def _serialize(self, value, attr, obj, **kwargs): if not value: @@ -53,10 +53,10 @@ def _deserialize(self, value, attr, data, **kwargs): class DAGRunSchema(SQLAlchemySchema): - """Schema for DAGRun""" + """Schema for DAGRun.""" class Meta: - """Meta""" + """Meta.""" model = DagRun dateformat = "iso" @@ -113,7 +113,7 @@ def autofill(self, data, **kwargs): class SetDagRunStateFormSchema(Schema): - """Schema for handling the request of setting state of DAG run""" + """Schema for handling the request of setting state of DAG run.""" state = DagStateField( validate=validate.OneOf( @@ -123,30 +123,30 @@ class SetDagRunStateFormSchema(Schema): class ClearDagRunStateFormSchema(Schema): - """Schema for handling the request of clearing a DAG run""" + """Schema for handling the request of clearing a DAG run.""" dry_run = fields.Boolean(load_default=True) class DAGRunCollection(NamedTuple): - """List of DAGRuns with metadata""" + """List of DAGRuns with metadata.""" dag_runs: list[DagRun] total_entries: int 
class DAGRunCollectionSchema(Schema): - """DAGRun Collection schema""" + """DAGRun Collection schema.""" dag_runs = fields.List(fields.Nested(DAGRunSchema)) total_entries = fields.Int() class DagRunsBatchFormSchema(Schema): - """Schema to validate and deserialize the Form(request payload) submitted to DagRun Batch endpoint""" + """Schema to validate and deserialize the Form(request payload) submitted to DagRun Batch endpoint.""" class Meta: - """Meta""" + """Meta.""" datetimeformat = "iso" strict = True @@ -165,7 +165,7 @@ class Meta: class SetDagRunNoteFormSchema(Schema): - """Schema for handling the request of clearing a DAG run""" + """Schema for handling the request of clearing a DAG run.""" notes = fields.String(allow_none=True, validate=validate.Length(max=1000)) diff --git a/airflow/api_connexion/schemas/dag_schema.py b/airflow/api_connexion/schemas/dag_schema.py index b658ac1cc2795..182bbf180334f 100644 --- a/airflow/api_connexion/schemas/dag_schema.py +++ b/airflow/api_connexion/schemas/dag_schema.py @@ -29,10 +29,10 @@ class DagTagSchema(SQLAlchemySchema): - """Dag Tag schema""" + """Dag Tag schema.""" class Meta: - """Meta""" + """Meta.""" model = DagTag @@ -40,10 +40,10 @@ class Meta: class DAGSchema(SQLAlchemySchema): - """DAG schema""" + """DAG schema.""" class Meta: - """Meta""" + """Meta.""" model = DagModel @@ -76,20 +76,20 @@ class Meta: @staticmethod def get_owners(obj: DagModel): - """Convert owners attribute to DAG representation""" + """Convert owners attribute to DAG representation.""" if not getattr(obj, "owners", None): return [] return obj.owners.split(",") @staticmethod def get_token(obj: DagModel): - """Return file token""" + """Return file token.""" serializer = URLSafeSerializer(conf.get_mandatory_value("webserver", "secret_key")) return serializer.dumps(obj.fileloc) class DAGDetailSchema(DAGSchema): - """DAG details""" + """DAG details.""" owners = fields.Method("get_owners", dump_only=True) timezone = TimezoneField() @@ -117,7 +117,7 @@ def get_concurrency(obj: DAG): @staticmethod def get_tags(obj: DAG): - """Dumps tags as objects""" + """Dumps tags as objects.""" tags = obj.tags if tags: return [DagTagSchema().dump(dict(name=tag)) for tag in tags] @@ -125,37 +125,37 @@ def get_tags(obj: DAG): @staticmethod def get_owners(obj: DAG): - """Convert owners attribute to DAG representation""" + """Convert owners attribute to DAG representation.""" if not getattr(obj, "owner", None): return [] return obj.owner.split(",") @staticmethod def get_is_paused(obj: DAG): - """Checks entry in DAG table to see if this DAG is paused""" + """Checks entry in DAG table to see if this DAG is paused.""" return obj.get_is_paused() @staticmethod def get_is_active(obj: DAG): - """Checks entry in DAG table to see if this DAG is active""" + """Checks entry in DAG table to see if this DAG is active.""" return obj.get_is_active() @staticmethod def get_params(obj: DAG): - """Get the Params defined in a DAG""" + """Get the Params defined in a DAG.""" params = obj.params return {k: v.dump() for k, v in params.items()} class DAGCollection(NamedTuple): - """List of DAGs with metadata""" + """List of DAGs with metadata.""" dags: list[DagModel] total_entries: int class DAGCollectionSchema(Schema): - """DAG Collection schema""" + """DAG Collection schema.""" dags = fields.List(fields.Nested(DAGSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/dag_source_schema.py b/airflow/api_connexion/schemas/dag_source_schema.py index 1520d58296c2d..adb89ce76a569 100644 --- 
a/airflow/api_connexion/schemas/dag_source_schema.py +++ b/airflow/api_connexion/schemas/dag_source_schema.py @@ -20,7 +20,7 @@ class DagSourceSchema(Schema): - """Dag Source schema""" + """Dag Source schema.""" content = fields.String(dump_only=True) diff --git a/airflow/api_connexion/schemas/dag_warning_schema.py b/airflow/api_connexion/schemas/dag_warning_schema.py index 211f251e7d26e..35c9830d273c7 100644 --- a/airflow/api_connexion/schemas/dag_warning_schema.py +++ b/airflow/api_connexion/schemas/dag_warning_schema.py @@ -25,10 +25,10 @@ class DagWarningSchema(SQLAlchemySchema): - """Import error schema""" + """Import error schema.""" class Meta: - """Meta""" + """Meta.""" model = DagWarning @@ -39,14 +39,14 @@ class Meta: class DagWarningCollection(NamedTuple): - """List of dag warnings with metadata""" + """List of dag warnings with metadata.""" dag_warnings: list[DagWarning] total_entries: int class DagWarningCollectionSchema(Schema): - """Import error collection schema""" + """Import error collection schema.""" dag_warnings = fields.List(fields.Nested(DagWarningSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/dataset_schema.py b/airflow/api_connexion/schemas/dataset_schema.py index f2230aa57255a..bfdd0d24231c8 100644 --- a/airflow/api_connexion/schemas/dataset_schema.py +++ b/airflow/api_connexion/schemas/dataset_schema.py @@ -32,10 +32,10 @@ class TaskOutletDatasetReferenceSchema(SQLAlchemySchema): - """TaskOutletDatasetReference DB schema""" + """TaskOutletDatasetReference DB schema.""" class Meta: - """Meta""" + """Meta.""" model = TaskOutletDatasetReference @@ -46,10 +46,10 @@ class Meta: class DagScheduleDatasetReferenceSchema(SQLAlchemySchema): - """DagScheduleDatasetReference DB schema""" + """DagScheduleDatasetReference DB schema.""" class Meta: - """Meta""" + """Meta.""" model = DagScheduleDatasetReference @@ -59,10 +59,10 @@ class Meta: class DatasetSchema(SQLAlchemySchema): - """Dataset DB schema""" + """Dataset DB schema.""" class Meta: - """Meta""" + """Meta.""" model = DatasetModel @@ -76,14 +76,14 @@ class Meta: class DatasetCollection(NamedTuple): - """List of Datasets with meta""" + """List of Datasets with meta.""" datasets: list[DatasetModel] total_entries: int class DatasetCollectionSchema(Schema): - """Dataset Collection Schema""" + """Dataset Collection Schema.""" datasets = fields.List(fields.Nested(DatasetSchema)) total_entries = fields.Int() @@ -94,10 +94,10 @@ class DatasetCollectionSchema(Schema): class BasicDAGRunSchema(SQLAlchemySchema): - """Basic Schema for DAGRun""" + """Basic Schema for DAGRun.""" class Meta: - """Meta""" + """Meta.""" model = DagRun dateformat = "iso" @@ -113,10 +113,10 @@ class Meta: class DatasetEventSchema(SQLAlchemySchema): - """Dataset Event DB schema""" + """Dataset Event DB schema.""" class Meta: - """Meta""" + """Meta.""" model = DatasetEvent @@ -133,14 +133,14 @@ class Meta: class DatasetEventCollection(NamedTuple): - """List of Dataset events with meta""" + """List of Dataset events with meta.""" dataset_events: list[DatasetEvent] total_entries: int class DatasetEventCollectionSchema(Schema): - """Dataset Event Collection Schema""" + """Dataset Event Collection Schema.""" dataset_events = fields.List(fields.Nested(DatasetEventSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/enum_schemas.py b/airflow/api_connexion/schemas/enum_schemas.py index 63c66000a7244..981a3669b1b58 100644 --- a/airflow/api_connexion/schemas/enum_schemas.py +++ 
b/airflow/api_connexion/schemas/enum_schemas.py @@ -22,7 +22,7 @@ class DagStateField(fields.String): - """Schema for DagState Enum""" + """Schema for DagState Enum.""" def __init__(self, **metadata): super().__init__(**metadata) @@ -30,7 +30,7 @@ def __init__(self, **metadata): class TaskInstanceStateField(fields.String): - """Schema for TaskInstanceState Enum""" + """Schema for TaskInstanceState Enum.""" def __init__(self, **metadata): super().__init__(**metadata) diff --git a/airflow/api_connexion/schemas/error_schema.py b/airflow/api_connexion/schemas/error_schema.py index ebc6f6c85b5bf..dcd4d37ff7781 100644 --- a/airflow/api_connexion/schemas/error_schema.py +++ b/airflow/api_connexion/schemas/error_schema.py @@ -25,10 +25,10 @@ class ImportErrorSchema(SQLAlchemySchema): - """Import error schema""" + """Import error schema.""" class Meta: - """Meta""" + """Meta.""" model = ImportError @@ -41,14 +41,14 @@ class Meta: class ImportErrorCollection(NamedTuple): - """List of import errors with metadata""" + """List of import errors with metadata.""" import_errors: list[ImportError] total_entries: int class ImportErrorCollectionSchema(Schema): - """Import error collection schema""" + """Import error collection schema.""" import_errors = fields.List(fields.Nested(ImportErrorSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/event_log_schema.py b/airflow/api_connexion/schemas/event_log_schema.py index a299ba51c4d48..5bf4ccf00d0b3 100644 --- a/airflow/api_connexion/schemas/event_log_schema.py +++ b/airflow/api_connexion/schemas/event_log_schema.py @@ -25,10 +25,10 @@ class EventLogSchema(SQLAlchemySchema): - """Event log schema""" + """Event log schema.""" class Meta: - """Meta""" + """Meta.""" model = Log @@ -43,14 +43,14 @@ class Meta: class EventLogCollection(NamedTuple): - """List of import errors with metadata""" + """List of import errors with metadata.""" event_logs: list[Log] total_entries: int class EventLogCollectionSchema(Schema): - """EventLog Collection Schema""" + """EventLog Collection Schema.""" event_logs = fields.List(fields.Nested(EventLogSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/health_schema.py b/airflow/api_connexion/schemas/health_schema.py index 9f4d773cc0ef6..67155406c1a79 100644 --- a/airflow/api_connexion/schemas/health_schema.py +++ b/airflow/api_connexion/schemas/health_schema.py @@ -20,23 +20,23 @@ class BaseInfoSchema(Schema): - """Base status field for metadatabase and scheduler""" + """Base status field for metadatabase and scheduler.""" status = fields.String(dump_only=True) class MetaDatabaseInfoSchema(BaseInfoSchema): - """Schema for Metadatabase info""" + """Schema for Metadatabase info.""" class SchedulerInfoSchema(BaseInfoSchema): - """Schema for Metadatabase info""" + """Schema for Metadatabase info.""" latest_scheduler_heartbeat = fields.String(dump_only=True) class HealthInfoSchema(Schema): - """Schema for the Health endpoint""" + """Schema for the Health endpoint.""" metadatabase = fields.Nested(MetaDatabaseInfoSchema) scheduler = fields.Nested(SchedulerInfoSchema) diff --git a/airflow/api_connexion/schemas/job_schema.py b/airflow/api_connexion/schemas/job_schema.py index 485a373ffab44..4d98d39c92030 100644 --- a/airflow/api_connexion/schemas/job_schema.py +++ b/airflow/api_connexion/schemas/job_schema.py @@ -23,10 +23,10 @@ class JobSchema(SQLAlchemySchema): - """Sla Miss Schema""" + """Sla Miss Schema.""" class Meta: - """Meta""" + """Meta.""" model = BaseJob diff --git 
a/airflow/api_connexion/schemas/log_schema.py b/airflow/api_connexion/schemas/log_schema.py index 5ba649f4d6988..82e291fafc42c 100644 --- a/airflow/api_connexion/schemas/log_schema.py +++ b/airflow/api_connexion/schemas/log_schema.py @@ -22,14 +22,14 @@ class LogsSchema(Schema): - """Schema for logs""" + """Schema for logs.""" content = fields.Str() continuation_token = fields.Str() class LogResponseObject(NamedTuple): - """Log Response Object""" + """Log Response Object.""" content: str continuation_token: str | None diff --git a/airflow/api_connexion/schemas/plugin_schema.py b/airflow/api_connexion/schemas/plugin_schema.py index 88c546012fd8a..780fef17bf76a 100644 --- a/airflow/api_connexion/schemas/plugin_schema.py +++ b/airflow/api_connexion/schemas/plugin_schema.py @@ -22,7 +22,7 @@ class PluginSchema(Schema): - """Plugin schema""" + """Plugin schema.""" name = fields.String() hooks = fields.List(fields.String()) @@ -37,14 +37,14 @@ class PluginSchema(Schema): class PluginCollection(NamedTuple): - """Plugin List""" + """Plugin List.""" plugins: list total_entries: int class PluginCollectionSchema(Schema): - """Plugin Collection List""" + """Plugin Collection List.""" plugins = fields.List(fields.Nested(PluginSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/pool_schema.py b/airflow/api_connexion/schemas/pool_schema.py index 8e2f49cc36e01..4e25287d1d357 100644 --- a/airflow/api_connexion/schemas/pool_schema.py +++ b/airflow/api_connexion/schemas/pool_schema.py @@ -25,10 +25,10 @@ class PoolSchema(SQLAlchemySchema): - """Pool schema""" + """Pool schema.""" class Meta: - """Meta""" + """Meta.""" model = Pool @@ -68,14 +68,14 @@ def get_open_slots(obj: Pool) -> float: class PoolCollection(NamedTuple): - """List of Pools with metadata""" + """List of Pools with metadata.""" pools: list[Pool] total_entries: int class PoolCollectionSchema(Schema): - """Pool Collection schema""" + """Pool Collection schema.""" pools = fields.List(fields.Nested(PoolSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/role_and_permission_schema.py b/airflow/api_connexion/schemas/role_and_permission_schema.py index a2a078cda0cd6..324336c288668 100644 --- a/airflow/api_connexion/schemas/role_and_permission_schema.py +++ b/airflow/api_connexion/schemas/role_and_permission_schema.py @@ -25,10 +25,10 @@ class ActionSchema(SQLAlchemySchema): - """Action Action Schema""" + """Action Schema.""" class Meta: - """Meta""" + """Meta.""" model = Action @@ -36,10 +36,10 @@ class Meta: class ResourceSchema(SQLAlchemySchema): - """View menu Schema""" + """View menu Schema.""" class Meta: - """Meta""" + """Meta.""" model = Resource @@ -47,24 +47,24 @@ class Meta: class ActionCollection(NamedTuple): - """Action Action Collection""" + """Action Collection.""" actions: list[Action] total_entries: int class ActionCollectionSchema(Schema): - """Permissions list schema""" + """Permissions list schema.""" actions = fields.List(fields.Nested(ActionSchema)) total_entries = fields.Int() class ActionResourceSchema(SQLAlchemySchema): - """Action View Schema""" + """Action View Schema.""" class Meta: - """Meta""" + """Meta.""" model = Permission @@ -73,10 +73,10 @@ class Meta: class RoleSchema(SQLAlchemySchema): - """Role item schema""" + """Role item schema.""" class Meta: - """Meta""" + """Meta.""" model = Role @@ -85,14 +85,14 @@ class Meta: class RoleCollection(NamedTuple): - """List of roles""" + """List of roles.""" roles: list[Role] total_entries: int class 
RoleCollectionSchema(Schema): - """List of roles""" + """List of roles.""" roles = fields.List(fields.Nested(RoleSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/sla_miss_schema.py b/airflow/api_connexion/schemas/sla_miss_schema.py index e5ddc6df31f0f..97a462e186d59 100644 --- a/airflow/api_connexion/schemas/sla_miss_schema.py +++ b/airflow/api_connexion/schemas/sla_miss_schema.py @@ -22,10 +22,10 @@ class SlaMissSchema(SQLAlchemySchema): - """Sla Miss Schema""" + """Sla Miss Schema.""" class Meta: - """Meta""" + """Meta.""" model = SlaMiss diff --git a/airflow/api_connexion/schemas/task_instance_schema.py b/airflow/api_connexion/schemas/task_instance_schema.py index e4284a0194b36..2dc2355d495cf 100644 --- a/airflow/api_connexion/schemas/task_instance_schema.py +++ b/airflow/api_connexion/schemas/task_instance_schema.py @@ -34,10 +34,10 @@ class TaskInstanceSchema(SQLAlchemySchema): - """Task instance schema""" + """Task instance schema.""" class Meta: - """Meta""" + """Meta.""" model = TaskInstance @@ -81,21 +81,21 @@ def get_attribute(self, obj, attr, default): class TaskInstanceCollection(NamedTuple): - """List of task instances with metadata""" + """List of task instances with metadata.""" task_instances: list[tuple[TaskInstance, SlaMiss | None]] total_entries: int class TaskInstanceCollectionSchema(Schema): - """Task instance collection schema""" + """Task instance collection schema.""" task_instances = fields.List(fields.Nested(TaskInstanceSchema)) total_entries = fields.Int() class TaskInstanceBatchFormSchema(Schema): - """Schema for the request form passed to Task Instance Batch endpoint""" + """Schema for the request form passed to Task Instance Batch endpoint.""" page_offset = fields.Int(load_default=0, validate=validate.Range(min=0)) page_limit = fields.Int(load_default=100, validate=validate.Range(min=1)) @@ -114,7 +114,7 @@ class TaskInstanceBatchFormSchema(Schema): class ClearTaskInstanceFormSchema(Schema): - """Schema for handling the request of clearing task instance of a Dag""" + """Schema for handling the request of clearing task instance of a Dag.""" dry_run = fields.Boolean(load_default=True) start_date = fields.DateTime(load_default=None, validate=validate_istimezone) @@ -133,7 +133,7 @@ class ClearTaskInstanceFormSchema(Schema): @validates_schema def validate_form(self, data, **kwargs): - """Validates clear task instance form""" + """Validates clear task instance form.""" if data["only_failed"] and data["only_running"]: raise ValidationError("only_failed and only_running both are set to True") if data["start_date"] and data["end_date"]: @@ -148,7 +148,7 @@ def validate_form(self, data, **kwargs): class SetTaskInstanceStateFormSchema(Schema): - """Schema for handling the request of setting state of task instance of a DAG""" + """Schema for handling the request of setting state of task instance of a DAG.""" dry_run = fields.Boolean(dump_default=True) task_id = fields.Str(required=True) @@ -162,20 +162,20 @@ class SetTaskInstanceStateFormSchema(Schema): @validates_schema def validate_form(self, data, **kwargs): - """Validates set task instance state form""" + """Validates set task instance state form.""" if not exactly_one(data.get("execution_date"), data.get("dag_run_id")): raise ValidationError("Exactly one of execution_date or dag_run_id must be provided") class SetSingleTaskInstanceStateFormSchema(Schema): - """Schema for handling the request of updating state of a single task instance""" + """Schema for handling the request of 
updating state of a single task instance.""" dry_run = fields.Boolean(dump_default=True) new_state = TaskInstanceStateField(required=True, validate=validate.OneOf([State.SUCCESS, State.FAILED])) class TaskInstanceReferenceSchema(Schema): - """Schema for the task instance reference schema""" + """Schema for the task instance reference schema.""" task_id = fields.Str() run_id = fields.Str(data_key="dag_run_id") @@ -184,19 +184,19 @@ class TaskInstanceReferenceSchema(Schema): class TaskInstanceReferenceCollection(NamedTuple): - """List of objects with metadata about taskinstance and dag_run_id""" + """List of objects with metadata about taskinstance and dag_run_id.""" task_instances: list[tuple[TaskInstance, str]] class TaskInstanceReferenceCollectionSchema(Schema): - """Collection schema for task reference""" + """Collection schema for task reference.""" task_instances = fields.List(fields.Nested(TaskInstanceReferenceSchema)) class SetTaskInstanceNoteFormSchema(Schema): - """Schema for settings a note for a TaskInstance""" + """Schema for settings a note for a TaskInstance.""" # Note: We can't add map_index to the url as subpaths can't start with dashes. map_index = fields.Int(allow_none=False) diff --git a/airflow/api_connexion/schemas/task_schema.py b/airflow/api_connexion/schemas/task_schema.py index 2b9cc5779428b..0fcb9ff18fb88 100644 --- a/airflow/api_connexion/schemas/task_schema.py +++ b/airflow/api_connexion/schemas/task_schema.py @@ -31,7 +31,7 @@ class TaskSchema(Schema): - """Task schema""" + """Task schema.""" class_ref = fields.Method("_get_class_reference", dump_only=True) operator_name = fields.Method("_get_operator_name", dump_only=True) @@ -71,20 +71,20 @@ def _get_operator_name(self, obj): @staticmethod def get_params(obj): - """Get the Params defined in a Task""" + """Get the Params defined in a Task.""" params = obj.params return {k: v.dump() for k, v in params.items()} class TaskCollection(NamedTuple): - """List of Tasks with metadata""" + """List of Tasks with metadata.""" tasks: list[Operator] total_entries: int class TaskCollectionSchema(Schema): - """Schema for TaskCollection""" + """Schema for TaskCollection.""" tasks = fields.List(fields.Nested(TaskSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/trigger_schema.py b/airflow/api_connexion/schemas/trigger_schema.py index 4b2827472406b..15d180a5732ff 100644 --- a/airflow/api_connexion/schemas/trigger_schema.py +++ b/airflow/api_connexion/schemas/trigger_schema.py @@ -23,10 +23,10 @@ class TriggerSchema(SQLAlchemySchema): - """Sla Miss Schema""" + """Sla Miss Schema.""" class Meta: - """Meta""" + """Meta.""" model = Trigger diff --git a/airflow/api_connexion/schemas/user_schema.py b/airflow/api_connexion/schemas/user_schema.py index 0c97f06cce8f6..843ad32f0245c 100644 --- a/airflow/api_connexion/schemas/user_schema.py +++ b/airflow/api_connexion/schemas/user_schema.py @@ -27,10 +27,10 @@ class UserCollectionItemSchema(SQLAlchemySchema): - """user collection item schema""" + """user collection item schema.""" class Meta: - """Meta""" + """Meta.""" model = User dateformat = "iso" @@ -49,20 +49,20 @@ class Meta: class UserSchema(UserCollectionItemSchema): - """User schema""" + """User schema.""" password = auto_field(load_only=True) class UserCollection(NamedTuple): - """User collection""" + """User collection.""" users: list[User] total_entries: int class UserCollectionSchema(Schema): - """User collection schema""" + """User collection schema.""" users = 
fields.List(fields.Nested(UserCollectionItemSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/variable_schema.py b/airflow/api_connexion/schemas/variable_schema.py index 8c2b97d06bae2..ffe54f0742907 100644 --- a/airflow/api_connexion/schemas/variable_schema.py +++ b/airflow/api_connexion/schemas/variable_schema.py @@ -20,7 +20,7 @@ class VariableSchema(Schema): - """Variable Schema""" + """Variable Schema.""" key = fields.String(required=True) value = fields.String(attribute="val", required=True) @@ -28,7 +28,7 @@ class VariableSchema(Schema): class VariableCollectionSchema(Schema): - """Variable Collection Schema""" + """Variable Collection Schema.""" variables = fields.List(fields.Nested(VariableSchema)) total_entries = fields.Int() diff --git a/airflow/api_connexion/schemas/version_schema.py b/airflow/api_connexion/schemas/version_schema.py index 91f75815cd006..519f91c55e816 100644 --- a/airflow/api_connexion/schemas/version_schema.py +++ b/airflow/api_connexion/schemas/version_schema.py @@ -20,7 +20,7 @@ class VersionInfoSchema(Schema): - """Version information schema""" + """Version information schema.""" version = fields.String(dump_only=True) git_version = fields.String(dump_only=True) diff --git a/airflow/api_connexion/schemas/xcom_schema.py b/airflow/api_connexion/schemas/xcom_schema.py index d730f40554906..09d2505bf7d4d 100644 --- a/airflow/api_connexion/schemas/xcom_schema.py +++ b/airflow/api_connexion/schemas/xcom_schema.py @@ -25,10 +25,10 @@ class XComCollectionItemSchema(SQLAlchemySchema): - """Schema for a xcom item""" + """Schema for a xcom item.""" class Meta: - """Meta""" + """Meta.""" model = XCom @@ -40,20 +40,20 @@ class Meta: class XComSchema(XComCollectionItemSchema): - """XCom schema""" + """XCom schema.""" value = auto_field() class XComCollection(NamedTuple): - """List of XComs with meta""" + """List of XComs with meta.""" xcom_entries: list[XCom] total_entries: int class XComCollectionSchema(Schema): - """XCom Collection Schema""" + """XCom Collection Schema.""" xcom_entries = fields.List(fields.Nested(XComCollectionItemSchema)) total_entries = fields.Int()
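
The diff converges on one docstring convention: a one-line summary ending with a period, or a multi-line docstring whose first line is a short, period-terminated summary followed by a blank line and detail (as in test_connection, format_datetime and check_limit). A minimal sketch of that convention, using hypothetical functions rather than Airflow code, looks like this:

    def get_widget(widget_id: int) -> dict:
        """Get a widget by id."""  # one-line form: the summary ends with a period
        return {"id": widget_id}


    def list_widgets(limit: int = 100) -> list[dict]:
        """
        List widgets up to a configured limit.

        The first line is a short, period-terminated summary; further detail
        follows after a blank line, mirroring the reworked multi-line
        docstrings in the diff above.
        """
        return [{"id": i} for i in range(limit)]

This matches the style that docstring linters such as pydocstyle enforce via D400/D415 ("First line should end with a period"), which is presumably what motivates adding a trailing period to every summary line here; the diff itself does not name the rule. As a rough, illustrative way to find candidate docstrings mechanically (a sketch, not the tooling used by this change), the standard-library ast module can report first lines that lack terminal punctuation:

    import ast
    import pathlib


    def first_lines_missing_period(path: str) -> list[tuple[int, str]]:
        """Return (lineno, summary) pairs for docstrings whose first line lacks ./?/!."""
        tree = ast.parse(pathlib.Path(path).read_text())
        offenders = []
        for node in ast.walk(tree):
            if isinstance(node, (ast.Module, ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
                doc = ast.get_docstring(node)
                if doc:
                    summary = doc.strip().splitlines()[0].strip()
                    if summary and not summary.endswith((".", "?", "!")):
                        offenders.append((getattr(node, "lineno", 1), summary))
        return offenders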