diff --git a/.pylintrc b/.pylintrc
index 991797a09bf5..617ec7dd1276 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -16,7 +16,7 @@
 # W1201, W1202 disable log format warning. False positives (I think)
 # W0707 disable raise-missing-from which we cant use because py2 back compat
-disable=C,R,duplicate-code,W0511,W1201,W1202,W0707,no-init
+disable=C,R,duplicate-code,W0511,W1201,W1202,W0707,no-init,broad-except,bare-except
 # See: https://github.com/getsentry/responses/issues/74
 [TYPECHECK]
diff --git a/examples/airline_demo/airline_demo_tests/conftest.py b/examples/airline_demo/airline_demo_tests/conftest.py
index e92903c6fdc1..fe1f8762659e 100644
--- a/examples/airline_demo/airline_demo_tests/conftest.py
+++ b/examples/airline_demo/airline_demo_tests/conftest.py
@@ -27,7 +27,7 @@ def is_postgres_running():
         # header, one line for container, trailing \n
         return len(lines) == 3
-    except:  # pylint: disable=bare-except
+    except:
         return False
@@ -67,7 +67,7 @@ def postgres(pg_hostname):  # pylint: disable=redefined-outer-name
     try:
         subprocess.check_output(["docker-compose", "stop", "test-postgres-db-airline"])
         subprocess.check_output(["docker-compose", "rm", "-f", "test-postgres-db-airline"])
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         pass
     subprocess.check_output(["docker-compose", "up", "-d", "test-postgres-db-airline"])
diff --git a/examples/dbt_example/dbt_example_tests/conftest.py b/examples/dbt_example/dbt_example_tests/conftest.py
index fc1e22951d6e..c465bde37483 100644
--- a/examples/dbt_example/dbt_example_tests/conftest.py
+++ b/examples/dbt_example/dbt_example_tests/conftest.py
@@ -27,7 +27,7 @@ def is_postgres_running():
         # header, one line for container, trailing \n
         return len(lines) == 3
-    except:  # pylint: disable=bare-except
+    except:
         return False
@@ -65,7 +65,7 @@ def postgres(pg_hostname):  # pylint: disable=redefined-outer-name
         subprocess.check_output(
             ["docker-compose", "rm", "-f", "dbt_example_postgresql"]
         )
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         pass
     subprocess.check_output(["docker-compose", "up", "-d", "dbt_example_postgresql"])
diff --git a/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py b/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py
index 1c6046e20282..32d2ed869a81 100644
--- a/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py
+++ b/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py
@@ -152,7 +152,7 @@ def local_port_forward_postgres(namespace):
                 )
                 conn.close()
                 break
-            except:  # pylint: disable=bare-except, broad-except
+            except:
                 time.sleep(1)
                 continue
@@ -363,7 +363,7 @@ def check_export_runs(instance):
         try:
             export_run(instance, run, output_file)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
            print(f"Hit an error exporting dagster-debug {output_file}: {e}")
            continue
diff --git a/python_modules/dagit/dagit/graphql.py b/python_modules/dagit/dagit/graphql.py
index 5f137201f88b..04816a3476b4 100644
--- a/python_modules/dagit/dagit/graphql.py
+++ b/python_modules/dagit/dagit/graphql.py
@@ -227,7 +227,7 @@ async def _handle_async_results(results: AsyncGenerator, operation_id: str, webs
                 payload["errors"] = [format_graphql_error(err) for err in result.errors]
             await _send_message(websocket, GraphQLWS.DATA, payload, operation_id)
-    except Exception as error:  # pylint: disable=broad-except
+    except Exception as error:
         if not isinstance(error, GraphQLError):
             error = GraphQLError(str(error))
diff --git a/python_modules/dagit/dagit/telemetry.py b/python_modules/dagit/dagit/telemetry.py
index fe73f53250de..9119f91cdd76 100644
--- a/python_modules/dagit/dagit/telemetry.py
+++ b/python_modules/dagit/dagit/telemetry.py
@@ -64,7 +64,7 @@ def upload_logs(stop_event, raise_errors=False):
             in_progress = False
             stop_event.wait(600)  # Sleep for 10 minutes
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         if raise_errors:
             raise
@@ -107,6 +107,6 @@ def _upload_logs(dagster_log_dir, log_size, dagster_log_queue_dir, raise_errors)
            if success:
                os.remove(curr_full_path)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
        if raise_errors:
            raise
diff --git a/python_modules/dagster-graphql/dagster_graphql/implementation/external.py b/python_modules/dagster-graphql/dagster_graphql/implementation/external.py
index fc20b27fbaf6..2a8994522211 100644
--- a/python_modules/dagster-graphql/dagster_graphql/implementation/external.py
+++ b/python_modules/dagster-graphql/dagster_graphql/implementation/external.py
@@ -59,7 +59,7 @@ def get_subset_external_pipeline(context, selector):
     try:
         subset_result = repository_location.get_subset_external_pipeline_result(selector)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         error_info = serializable_error_info_from_exc_info(sys.exc_info())
         raise UserFacingGraphQLError(
             GrapheneInvalidSubsetError(
diff --git a/python_modules/dagster-graphql/dagster_graphql/implementation/utils.py b/python_modules/dagster-graphql/dagster_graphql/implementation/utils.py
index b299d5938893..96d2719b2492 100644
--- a/python_modules/dagster-graphql/dagster_graphql/implementation/utils.py
+++ b/python_modules/dagster-graphql/dagster_graphql/implementation/utils.py
@@ -34,7 +34,7 @@ def _fn(*args, **kwargs):
             return fn(*args, **kwargs)
         except UserFacingGraphQLError as de_exception:
             return de_exception.error
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             return GraphenePythonError(serializable_error_info_from_exc_info(sys.exc_info()))
     return _fn
diff --git a/python_modules/dagster-graphql/dagster_graphql/schema/instigation.py b/python_modules/dagster-graphql/dagster_graphql/schema/instigation.py
index d0ce5fb40628..32187e82daac 100644
--- a/python_modules/dagster-graphql/dagster_graphql/schema/instigation.py
+++ b/python_modules/dagster-graphql/dagster_graphql/schema/instigation.py
@@ -182,7 +182,7 @@ def resolve_evaluationResult(self, graphene_info):
                 schedule_name=external_schedule.name,
                 scheduled_execution_time=schedule_time,
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             schedule_data = serializable_error_info_from_exc_info(sys.exc_info())
         return GrapheneTickEvaluation(schedule_data)
diff --git a/python_modules/dagster/dagster/cli/pipeline.py b/python_modules/dagster/dagster/cli/pipeline.py
index 833abaef1cce..7a99b5e04503 100644
--- a/python_modules/dagster/dagster/cli/pipeline.py
+++ b/python_modules/dagster/dagster/cli/pipeline.py
@@ -913,7 +913,7 @@ def _execute_backfill_command_at_location(
                 repo_handle,
                 partition_set_name,
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error_info = serializable_error_info_from_exc_info(sys.exc_info())
             raise DagsterBackfillFailedError(
                 "Failure fetching partition names: {error_message}".format(
@@ -958,7 +958,7 @@ def _execute_backfill_command_at_location(
                     partition_names=partition_names,
                 )
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error_info = serializable_error_info_from_exc_info(sys.exc_info())
             instance.add_backfill(
                 backfill_job.with_status(BulkActionStatus.FAILED).with_error(error_info)
diff --git a/python_modules/dagster/dagster/cli/sensor.py b/python_modules/dagster/dagster/cli/sensor.py
index d6f2c85dc9d9..9eeef4ac17f5 100644
--- a/python_modules/dagster/dagster/cli/sensor.py
+++ b/python_modules/dagster/dagster/cli/sensor.py
@@ -284,7 +284,7 @@ def execute_preview_command(
                 last_run_key,
                 cursor,
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
            error_info = serializable_error_info_from_exc_info(sys.exc_info())
            print_fn(
                "Failed to resolve sensor for {sensor_name} : {error_info}".format(
diff --git a/python_modules/dagster/dagster/core/definitions/inference.py b/python_modules/dagster/dagster/core/definitions/inference.py
index ec7c5af3daee..7e3734f29842 100644
--- a/python_modules/dagster/dagster/core/definitions/inference.py
+++ b/python_modules/dagster/dagster/core/definitions/inference.py
@@ -32,7 +32,7 @@ def _infer_input_description_from_docstring(fn: Callable) -> Dict[str, Optional[
     try:
         docstring = parse(doc_str)
         return {p.arg_name: p.description for p in docstring.params}
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         return {}
@@ -48,7 +48,7 @@ def _infer_output_description_from_docstring(fn: Callable) -> Optional[str]:
            return None
        return docstring.returns.description
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
        return None
diff --git a/python_modules/dagster/dagster/core/definitions/reconstructable.py b/python_modules/dagster/dagster/core/definitions/reconstructable.py
index ac05f542b68a..6f86af7375f0 100644
--- a/python_modules/dagster/dagster/core/definitions/reconstructable.py
+++ b/python_modules/dagster/dagster/core/definitions/reconstructable.py
@@ -340,7 +340,7 @@ def make_bar_job():
             and inspect.getmodule(target).__name__ != "__main__"
         ):
             return ReconstructablePipeline.for_module(target.__module__, target.__name__)
-    except:  # pylint: disable=bare-except
+    except:
        pass
    python_file = get_python_file_from_target(target)
diff --git a/python_modules/dagster/dagster/core/definitions/utils.py b/python_modules/dagster/dagster/core/definitions/utils.py
index d4840bb36f21..727abee9e36d 100644
--- a/python_modules/dagster/dagster/core/definitions/utils.py
+++ b/python_modules/dagster/dagster/core/definitions/utils.py
@@ -91,7 +91,7 @@ def validate_tags(tags: Optional[Dict[str, Any]]) -> Dict[str, Any]:
                     )
                 valid = seven.json.loads(str_val) == value
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 pass
            if not valid:
diff --git a/python_modules/dagster/dagster/core/execution/api.py b/python_modules/dagster/dagster/core/execution/api.py
index 09d82c4cc8fa..3ecd6c1a0487 100644
--- a/python_modules/dagster/dagster/core/execution/api.py
+++ b/python_modules/dagster/dagster/core/execution/api.py
@@ -787,7 +787,7 @@ def pipeline_execution_iterator(
         pipeline_canceled_info = serializable_error_info_from_exc_info(sys.exc_info())
         if pipeline_context.raise_on_error:
             raise
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         pipeline_exception_info = serializable_error_info_from_exc_info(sys.exc_info())
         if pipeline_context.raise_on_error:
             raise  # finally block will run before this is re-raised
diff --git a/python_modules/dagster/dagster/core/execution/plan/execute_plan.py b/python_modules/dagster/dagster/core/execution/plan/execute_plan.py
index 0d1efe3da0e5..23e9e79e789b 100644
--- a/python_modules/dagster/dagster/core/execution/plan/execute_plan.py
+++ b/python_modules/dagster/dagster/core/execution/plan/execute_plan.py
@@ -284,7 +284,7 @@ def _dagster_event_sequence_for_step(step_context: StepExecutionContext) -> Iter
         raise dagster_error
     # case (6) in top comment
-    except Exception as unexpected_exception:  # pylint: disable=broad-except
+    except Exception as unexpected_exception:
         step_context.capture_step_exception(unexpected_exception)
         yield step_failure_event_from_exc_info(
             step_context,
diff --git a/python_modules/dagster/dagster/core/executor/child_process_executor.py b/python_modules/dagster/dagster/core/executor/child_process_executor.py
index 0a0a82a9cedc..ffb91100562d 100644
--- a/python_modules/dagster/dagster/core/executor/child_process_executor.py
+++ b/python_modules/dagster/dagster/core/executor/child_process_executor.py
@@ -66,7 +66,6 @@ def _execute_command_in_child_process(event_queue, command):
             event_queue.put(step_event)
         event_queue.put(ChildProcessDoneEvent(pid=pid))
-    # pylint: disable=broad-except
     except (
         Exception,
         KeyboardInterrupt,
diff --git a/python_modules/dagster/dagster/core/host_representation/grpc_server_registry.py b/python_modules/dagster/dagster/core/host_representation/grpc_server_registry.py
index 399f832b1430..6864fb94651d 100644
--- a/python_modules/dagster/dagster/core/host_representation/grpc_server_registry.py
+++ b/python_modules/dagster/dagster/core/host_representation/grpc_server_registry.py
@@ -181,7 +181,7 @@ def _get_grpc_endpoint(self, repository_location_origin):
                     startup_timeout=self._startup_timeout,
                 )
                 self._all_processes.append(server_process)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 server_process = serializable_error_info_from_exc_info(sys.exc_info())
                 new_server_id = None
diff --git a/python_modules/dagster/dagster/core/storage/event_log/in_memory.py b/python_modules/dagster/dagster/core/storage/event_log/in_memory.py
index fa35163c93d7..82b34bc03527 100644
--- a/python_modules/dagster/dagster/core/storage/event_log/in_memory.py
+++ b/python_modules/dagster/dagster/core/storage/event_log/in_memory.py
@@ -96,7 +96,7 @@ def store_event(self, event):
         for handler in handlers:
             try:
                 handler(event)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 logging.exception("Exception in callback for event watch on run %s.", run_id)
    def delete_events(self, run_id):
diff --git a/python_modules/dagster/dagster/core/storage/event_log/sqlite/consolidated_sqlite_event_log.py b/python_modules/dagster/dagster/core/storage/event_log/sqlite/consolidated_sqlite_event_log.py
index d7e4b4771da8..d9fec6018732 100644
--- a/python_modules/dagster/dagster/core/storage/event_log/sqlite/consolidated_sqlite_event_log.py
+++ b/python_modules/dagster/dagster/core/storage/event_log/sqlite/consolidated_sqlite_event_log.py
@@ -162,7 +162,7 @@ def on_modified(self):
             status = None
             try:
                 status = callback(event)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 logging.exception("Exception in callback for event watch on run %s.", run_id)
            if (
diff --git a/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py b/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py
index ba5b35f3c597..2f1b3a7fd79d 100644
--- a/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py
+++ b/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py
@@ -417,7 +417,7 @@ def _process_log(self):
         status = None
         try:
             status = self._cb(event)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             logging.exception("Exception in callback for event watch on run %s.", self._run_id)
        if (
diff --git a/python_modules/dagster/dagster/core/storage/sql.py b/python_modules/dagster/dagster/core/storage/sql.py
index 74c02702a6e6..003ef8e0c61d 100644
--- a/python_modules/dagster/dagster/core/storage/sql.py
+++ b/python_modules/dagster/dagster/core/storage/sql.py
@@ -68,7 +68,7 @@ def handle_schema_errors(conn, alembic_config, msg=None):
             db_revision, head_revision = check_alembic_revision(alembic_config, conn)
         # If exceptions were raised during the revision check, we want to swallow them and
         # allow the original exception to fall through
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             pass
        if db_revision != head_revision:
diff --git a/python_modules/dagster/dagster/core/telemetry.py b/python_modules/dagster/dagster/core/telemetry.py
index 569a427f7342..f8b72741322d 100644
--- a/python_modules/dagster/dagster/core/telemetry.py
+++ b/python_modules/dagster/dagster/core/telemetry.py
@@ -324,7 +324,7 @@ def _set_telemetry_instance_id():
         with open(telemetry_id_path, "w") as telemetry_id_file:
             yaml.dump({INSTANCE_ID_STR: instance_id}, telemetry_id_file, default_flow_style=False)
         return instance_id
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         return "<>"
diff --git a/python_modules/dagster/dagster/core/workspace/context.py b/python_modules/dagster/dagster/core/workspace/context.py
index c4a12496efce..c3aff3269602 100644
--- a/python_modules/dagster/dagster/core/workspace/context.py
+++ b/python_modules/dagster/dagster/core/workspace/context.py
@@ -528,7 +528,7 @@ def _load_location(self, origin):
         error = None
         try:
             location = self._create_location_from_origin(origin)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error = serializable_error_info_from_exc_info(sys.exc_info())
             warnings.warn(
                 "Error loading repository location {location_name}:{error_string}".format(
diff --git a/python_modules/dagster/dagster/daemon/backfill.py b/python_modules/dagster/dagster/daemon/backfill.py
index a72835430387..98ea14aea16f 100644
--- a/python_modules/dagster/dagster/daemon/backfill.py
+++ b/python_modules/dagster/dagster/daemon/backfill.py
@@ -105,7 +105,7 @@ def execute_backfill_iteration(instance, workspace, logger, debug_crash_flags=No
                 )
                 instance.update_backfill(backfill_job.with_status(BulkActionStatus.COMPLETED))
             yield
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error_info = serializable_error_info_from_exc_info(sys.exc_info())
             instance.update_backfill(
                 backfill_job.with_status(BulkActionStatus.FAILED).with_error(error_info)
diff --git a/python_modules/dagster/dagster/daemon/controller.py b/python_modules/dagster/dagster/daemon/controller.py
index 8a771f09f77e..b095d906d163 100644
--- a/python_modules/dagster/dagster/daemon/controller.py
+++ b/python_modules/dagster/dagster/daemon/controller.py
@@ -186,7 +186,7 @@ def _daemon_heartbeat_healthy(self, daemon_type):
             if is_healthy:
                 self._last_healthy_heartbeat_times[daemon_type] = now
             return is_healthy
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self._logger.warning(
                 "Error attempting to check {daemon_type} heartbeat:".format(
                     daemon_type=daemon_type,
diff --git a/python_modules/dagster/dagster/daemon/daemon.py b/python_modules/dagster/dagster/daemon/daemon.py
index fc5bb42c90f0..0c8d25c9279d 100644
--- a/python_modules/dagster/dagster/daemon/daemon.py
+++ b/python_modules/dagster/dagster/daemon/daemon.py
@@ -102,7 +102,7 @@ def run_loop(
                     heartbeat_interval_seconds,
                     error_interval_seconds,
                 )
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self._logger.error(
                     "Failed to add heartbeat: \n{}".format(
                         serializable_error_info_from_exc_info(sys.exc_info())
@@ -135,7 +135,7 @@ def _run_iteration(
                    self._errors.appendleft((result, pendulum.now("UTC")))
            except StopIteration:
                break
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                error_info = serializable_error_info_from_exc_info(sys.exc_info())
                self._logger.error("Caught error:\n{}".format(error_info))
                self._errors.appendleft((error_info, pendulum.now("UTC")))
@@ -148,7 +148,7 @@ def _run_iteration(
                    heartbeat_interval_seconds,
                    error_interval_seconds,
                )
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                self._logger.error(
                    "Failed to add heartbeat: \n{}".format(
                        serializable_error_info_from_exc_info(sys.exc_info())
diff --git a/python_modules/dagster/dagster/daemon/monitoring/monitoring_daemon.py b/python_modules/dagster/dagster/daemon/monitoring/monitoring_daemon.py
index e103c63a7ae3..4b89ee1a9016 100644
--- a/python_modules/dagster/dagster/daemon/monitoring/monitoring_daemon.py
+++ b/python_modules/dagster/dagster/daemon/monitoring/monitoring_daemon.py
@@ -88,7 +88,7 @@ def execute_monitoring_iteration(instance, workspace, logger, _debug_crash_flags
                 pass
             else:
                 check.invariant(False, f"Unexpected run status: {run.status}")
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error_info = serializable_error_info_from_exc_info(sys.exc_info())
             logger.error(f"Hit error while monitoring run {run.run_id}: " f"{str(error_info)}")
             yield error_info
diff --git a/python_modules/dagster/dagster/daemon/run_coordinator/queued_run_coordinator_daemon.py b/python_modules/dagster/dagster/daemon/run_coordinator/queued_run_coordinator_daemon.py
index 36de4bb53220..155300e1fbbc 100644
--- a/python_modules/dagster/dagster/daemon/run_coordinator/queued_run_coordinator_daemon.py
+++ b/python_modules/dagster/dagster/daemon/run_coordinator/queued_run_coordinator_daemon.py
@@ -147,7 +147,7 @@ def run_iteration(self, instance, workspace):
             try:
                 self._dequeue_run(instance, run, workspace)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 error_info = serializable_error_info_from_exc_info(sys.exc_info())
                 message = (
diff --git a/python_modules/dagster/dagster/daemon/sensor.py b/python_modules/dagster/dagster/daemon/sensor.py
index a2a5a93876ca..0db5b0e966d3 100644
--- a/python_modules/dagster/dagster/daemon/sensor.py
+++ b/python_modules/dagster/dagster/daemon/sensor.py
@@ -237,7 +237,7 @@ def execute_sensor_iteration(
                 job_state,
                 sensor_debug_crash_flags,
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error_info = serializable_error_info_from_exc_info(sys.exc_info())
             logger.error(
                 "Sensor daemon caught an error for sensor {sensor_name} : {error_info}".format(
@@ -365,7 +365,7 @@ def _evaluate_sensor(
                    run_id=run.run_id, sensor_name=external_sensor.name
                )
            )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
            error_info = serializable_error_info_from_exc_info(sys.exc_info())
            context.logger.error(
                f"Run {run.run_id} created successfully but failed to launch: " f"{str(error_info)}"
diff --git a/python_modules/dagster/dagster/grpc/client.py b/python_modules/dagster/dagster/grpc/client.py
index 44f83835e216..8a47148115bd 100644
--- a/python_modules/dagster/dagster/grpc/client.py
+++ b/python_modules/dagster/dagster/grpc/client.py
@@ -363,7 +363,7 @@ def start_run(self, execute_run_args):
             )
             return res.serialized_start_run_result
-        except Exception:  # pylint: disable=bare-except
+        except Exception:
             pipeline_run = instance.get_run_by_id(execute_run_args.pipeline_run_id)
             instance.report_engine_event(
                 message="Unexpected error in IPC client",
diff --git a/python_modules/dagster/dagster/grpc/impl.py b/python_modules/dagster/dagster/grpc/impl.py
index cc62b602b84a..726d231d068f 100644
--- a/python_modules/dagster/dagster/grpc/impl.py
+++ b/python_modules/dagster/dagster/grpc/impl.py
@@ -73,7 +73,7 @@ def core_execute_run(recon_pipeline, pipeline_run, instance, resume_from_failure
     # try to load the pipeline definition early
     try:
         recon_pipeline.get_definition()
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         yield instance.report_engine_event(
             "Could not load pipeline definition.",
             pipeline_run,
@@ -92,7 +92,7 @@ def core_execute_run(recon_pipeline, pipeline_run, instance, resume_from_failure
            message="Run execution terminated by interrupt",
            pipeline_run=pipeline_run,
        )
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
        yield instance.report_engine_event(
            "An exception was thrown during execution that is likely a framework error, "
            "rather than an error in user code.",
@@ -128,7 +128,7 @@ def _run_in_subprocess(
        pid = os.getpid()
-    except:  # pylint: disable=bare-except
+    except:
        serializable_error_info = serializable_error_info_from_exc_info(sys.exc_info())
        event = IPCErrorMessage(
            serializable_error_info=serializable_error_info,
@@ -357,7 +357,7 @@ def get_external_execution_plan_snapshot(recon_pipeline, args):
            ),
            args.pipeline_snapshot_id,
        )
-    except:  # pylint: disable=bare-except
+    except:
        return ExecutionPlanSnapshotErrorData(
            error=serializable_error_info_from_exc_info(sys.exc_info())
        )
diff --git a/python_modules/dagster/dagster/grpc/server.py b/python_modules/dagster/dagster/grpc/server.py
index de236cc22ffb..624d92f1db31 100644
--- a/python_modules/dagster/dagster/grpc/server.py
+++ b/python_modules/dagster/dagster/grpc/server.py
@@ -203,7 +203,7 @@ def __init__(
         )
         try:
             self._repository_symbols_and_code_pointers.load()
-        except Exception:  # pylint:disable=broad-except
+        except Exception:
             if not lazy_load_user_code:
                 raise
             self._serializable_load_error = serializable_error_info_from_exc_info(sys.exc_info())
@@ -563,7 +563,7 @@ def ShutdownServer(self, request, _context):
                    ShutdownServerResult(success=True, serializable_error_info=None)
                )
            )
-        except:  # pylint: disable=bare-except
+        except:
            return api_pb2.ShutdownServerReply(
                serialized_shutdown_server_result=serialize_dagster_namedtuple(
                    ShutdownServerResult(
@@ -590,7 +590,7 @@ def CancelExecution(self, request, _context):
                self._termination_times[cancel_execution_request.run_id] = time.time()
                success = True
-        except:  # pylint: disable=bare-except
+        except:
            serializable_error_info = serializable_error_info_from_exc_info(sys.exc_info())
        return api_pb2.CancelExecutionReply(
@@ -640,7 +640,7 @@ def StartRun(self, request, _context):
            run_id = execute_run_args.pipeline_run_id
            recon_pipeline = self._recon_pipeline_from_origin(execute_run_args.pipeline_origin)
-        except:  # pylint: disable=bare-except
+        except:
            return api_pb2.StartRunReply(
                serialized_start_run_result=serialize_dagster_namedtuple(
                    StartRunResult(
diff --git a/python_modules/dagster/dagster/scheduler/scheduler.py b/python_modules/dagster/dagster/scheduler/scheduler.py
index f3405f831e47..80096c0a153b 100644
--- a/python_modules/dagster/dagster/scheduler/scheduler.py
+++ b/python_modules/dagster/dagster/scheduler/scheduler.py
@@ -95,7 +95,7 @@ def launch_scheduled_runs(
                 max_catchup_runs,
                 (debug_crash_flags.get(schedule_state.job_name) if debug_crash_flags else None),
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             error_info = serializable_error_info_from_exc_info(sys.exc_info())
             logger.error(
                 f"Scheduler caught an error for schedule {schedule_state.job_name} : {error_info.to_string()}"
@@ -307,7 +307,7 @@ def _schedule_runs_at_time(
        try:
            instance.submit_run(run.run_id, workspace)
            logger.info(f"Completed scheduled launch of run {run.run_id} for {schedule_name}")
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
            error_info = serializable_error_info_from_exc_info(sys.exc_info())
            logger.error(
                f"Run {run.run_id} created successfully but failed to launch: {str(serializable_error_info_from_exc_info(sys.exc_info()))}"
@@ -363,7 +363,7 @@ def _create_scheduler_run(
            known_state=None,
        )
        execution_plan_snapshot = external_execution_plan.execution_plan_snapshot
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
        execution_plan_errors.append(serializable_error_info_from_exc_info(sys.exc_info()))
    pipeline_tags = external_pipeline.tags or {}
diff --git a/python_modules/dagster/dagster/serdes/ipc.py b/python_modules/dagster/dagster/serdes/ipc.py
index e7046b4102ac..73db92d71a0c 100644
--- a/python_modules/dagster/dagster/serdes/ipc.py
+++ b/python_modules/dagster/dagster/serdes/ipc.py
@@ -114,7 +114,7 @@ def ipc_write_stream(file_path):
     _send(file_path, IPCStartMessage())
     try:
         yield FileBasedWriteStream(file_path)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         _send_error(file_path, sys.exc_info(), message=None)
     finally:
         _send(file_path, IPCEndMessage())
diff --git a/python_modules/dagster/dagster/utils/temp_file.py b/python_modules/dagster/dagster/utils/temp_file.py
index c2aed6502aaa..030b94c6becb 100644
--- a/python_modules/dagster/dagster/utils/temp_file.py
+++ b/python_modules/dagster/dagster/utils/temp_file.py
@@ -12,7 +12,7 @@ def _unlink_swallow_errors(path):
     check.str_param(path, "path")
     try:
         os.unlink(path)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         pass
diff --git a/python_modules/dagster/dagster_tests/conftest.py b/python_modules/dagster/dagster_tests/conftest.py
index 2be1e4f3d2b6..b1a4b9d5919a 100644
--- a/python_modules/dagster/dagster_tests/conftest.py
+++ b/python_modules/dagster/dagster_tests/conftest.py
@@ -94,7 +94,7 @@ def docker_service_up(docker_compose_file, service_name):
             ["docker-compose", "-f", docker_compose_file, "rm", "-f", service_name],
             env=env,
         )
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         pass
    subprocess.check_output(
@@ -113,7 +113,7 @@ def docker_service_up(docker_compose_file, service_name):
                ["docker-compose", "-f", docker_compose_file, "rm", "-f", service_name],
                env=env,
            )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
            pass
diff --git a/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_grpc_server_registry.py b/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_grpc_server_registry.py
index 44175b7c5a50..fc5d4deddd0c 100644
--- a/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_grpc_server_registry.py
+++ b/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_grpc_server_registry.py
@@ -41,7 +41,7 @@ def _can_connect(origin, endpoint):
             watch_server=False,
         ):
             return True
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
        return False
diff --git a/python_modules/dagster/dagster_tests/core_tests/storage_tests/test_event_log.py b/python_modules/dagster/dagster_tests/core_tests/storage_tests/test_event_log.py
index fa9e97f05b33..301f2665df8d 100644
--- a/python_modules/dagster/dagster_tests/core_tests/storage_tests/test_event_log.py
+++ b/python_modules/dagster/dagster_tests/core_tests/storage_tests/test_event_log.py
@@ -82,7 +82,7 @@ def cmd(self, exceptions, tmpdir_path):
         storage = SqliteEventLogStorage(tmpdir_path)
         try:
             storage.get_logs_for_run_by_log_id("foo")
-        except Exception as exc:  # pylint: disable=broad-except
+        except Exception as exc:
             exceptions.put(exc)
             exc_info = sys.exc_info()
             traceback.print_tb(exc_info[2])
diff --git a/python_modules/dagster/dagster_tests/daemon_tests/test_dagster_daemon_health.py b/python_modules/dagster/dagster_tests/daemon_tests/test_dagster_daemon_health.py
index a3a80bb80677..6fca26e8e949 100644
--- a/python_modules/dagster/dagster_tests/daemon_tests/test_dagster_daemon_health.py
+++ b/python_modules/dagster/dagster_tests/daemon_tests/test_dagster_daemon_health.py
@@ -131,7 +131,7 @@ def run_iteration_error(_, _instance, _workspace):
             if iteration_ran["ran"] and status.healthy:
                 try:
                     controller.check_daemon_threads()  # Should eventually throw since the sensor thread is interrupted
-                except Exception as e:  # pylint: disable=broad-except
+                except Exception as e:
                     assert (
                         "Stopping dagster-daemon process since the following threads are no longer running: ['SENSOR']"
                         in str(e)
diff --git a/python_modules/dagster/dagster_tests/execution_tests/test_interrupt.py b/python_modules/dagster/dagster_tests/execution_tests/test_interrupt.py
index 6732a00fe3f9..9e867b4d28dd 100644
--- a/python_modules/dagster/dagster_tests/execution_tests/test_interrupt.py
+++ b/python_modules/dagster/dagster_tests/execution_tests/test_interrupt.py
@@ -194,7 +194,7 @@ def test_capture_interrupt():
     with capture_interrupts():
         try:
             _send_interrupt_to_self()
-        except:  # pylint: disable=bare-except
+        except:
             inner_interrupt = True
    assert not inner_interrupt
@@ -216,9 +216,9 @@
        with capture_interrupts():
            try:
                time.sleep(5)
-            except:  # pylint: disable=bare-except
+            except:
                inner_interrupt = True
-    except:  # pylint: disable=bare-except
+    except:
        outer_interrupt = True
    assert not outer_interrupt
@@ -249,7 +249,7 @@ def test_interrupt_inside_nested_delay_and_raise():
            except DagsterExecutionInterruptedError:
                interrupt_inside_nested_raise = True
-    except:  # pylint: disable=bare-except
+    except:
        interrupt_after_delay = True
    assert interrupt_inside_nested_raise
@@ -266,11 +266,11 @@ def test_no_interrupt_after_nested_delay_and_raise():
            with raise_execution_interrupts():
                try:
                    time.sleep(5)
-                except:  # pylint: disable=bare-except
+                except:
                    interrupt_inside_nested_raise = True
        _send_interrupt_to_self()
-    except:  # pylint: disable=bare-except
+    except:
        interrupt_after_delay = True
    assert not interrupt_inside_nested_raise
@@ -289,7 +289,7 @@ def test_calling_raise_execution_interrupts_also_raises_any_captured_interrupts(
            pass
        except DagsterExecutionInterruptedError:
            interrupt_from_raise_execution_interrupts = True
-    except:  # pylint: disable=bare-except
+    except:
        interrupt_after_delay = True
    assert interrupt_from_raise_execution_interrupts
diff --git a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py
index 94e5312ae03a..fe4aa6edf7a5 100644
--- a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py
+++ b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py
@@ -291,7 +291,7 @@ def test_load_timeout():
             timeout=0.01,
         )
         assert False, "server should have timed out"
-    except Exception as e:  # pylint: disable=broad-except
+    except Exception as e:
         timeout_exception = e
     finally:
diff --git a/python_modules/dagster/dagster_tests/general_tests/test_serdes_rpc.py b/python_modules/dagster/dagster_tests/general_tests/test_serdes_rpc.py
index b67907cd6a88..38154bca7cea 100644
--- a/python_modules/dagster/dagster_tests/general_tests/test_serdes_rpc.py
+++ b/python_modules/dagster/dagster_tests/general_tests/test_serdes_rpc.py
@@ -62,7 +62,7 @@ def test_write_error_with_custom_message():
     with ipc_write_stream(filename) as stream:
         try:
             raise Exception("uh oh")
-        except:  # pylint: disable=bare-except
+        except:
             stream.send_error(sys.exc_info(), message="custom")
    messages = []
diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow/dagster_pipeline_factory.py b/python_modules/libraries/dagster-airflow/dagster_airflow/dagster_pipeline_factory.py
index f920b9e4d8ef..0f95259f5dcb 100644
--- a/python_modules/libraries/dagster-airflow/dagster_airflow/dagster_pipeline_factory.py
+++ b/python_modules/libraries/dagster-airflow/dagster_airflow/dagster_pipeline_factory.py
@@ -213,7 +213,7 @@ def make_repo_from_dir():
             safe_mode=safe_mode,
             store_serialized_dags=store_serialized_dags,
         )
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
        raise DagsterAirflowError("Error initializing airflow.models.dagbag object with arguments")
    return make_dagster_repo_from_airflow_dag_bag(dag_bag, repo_name, use_airflow_template_context)
diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow/operators/docker_operator.py b/python_modules/libraries/dagster-airflow/dagster_airflow/operators/docker_operator.py
index bc3d825b9d52..2464f3324121 100644
--- a/python_modules/libraries/dagster-airflow/dagster_airflow/operators/docker_operator.py
+++ b/python_modules/libraries/dagster-airflow/dagster_airflow/operators/docker_operator.py
@@ -61,7 +61,7 @@ def __init__(self, dagster_operator_parameters, *args):
         if not self.docker_conn_id_set:
             try:
                 from_env().version()
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 pass
             else:
                 kwargs["docker_conn_id"] = True
@@ -255,7 +255,7 @@ def execute(self, context):
            try:
                events = [deserialize_json_to_dagster_namedtuple(line) for line in res if line]
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                raise AirflowException(
                    "Could not parse response {response}".format(response=repr(res))
                )
diff --git a/python_modules/libraries/dagster-aws/dagster_aws/cloudwatch/loggers.py b/python_modules/libraries/dagster-aws/dagster_aws/cloudwatch/loggers.py
index 56fbbe67f753..53eea73763c6 100644
--- a/python_modules/libraries/dagster-aws/dagster_aws/cloudwatch/loggers.py
+++ b/python_modules/libraries/dagster-aws/dagster_aws/cloudwatch/loggers.py
@@ -107,7 +107,7 @@ def log_error(self, record, exc):
             logging.error(
                 "Attempted to log: {record}".format(record=seven.json.dumps(record.__dict__))
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             pass
        logging.exception(str(exc))
diff --git a/python_modules/libraries/dagster-aws/dagster_aws/redshift/resources.py b/python_modules/libraries/dagster-aws/dagster_aws/redshift/resources.py
index 41e112303894..d3e4a6bc7447 100644
--- a/python_modules/libraries/dagster-aws/dagster_aws/redshift/resources.py
+++ b/python_modules/libraries/dagster-aws/dagster_aws/redshift/resources.py
@@ -80,7 +80,7 @@ def execute_query(self, query, fetch_results=False, cursor_factory=None, error_c
                 else:
                     self.log.info("Empty result from query")
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             # If autocommit is disabled or not set (it is disabled by default), Redshift
             # will be in the middle of a transaction at exception time, and because of
             # the failure the current transaction will not accept any further queries.
@@ -141,7 +141,7 @@ def execute_queries(
                        results.append([])
                        self.log.info("Empty result from query")
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
            # If autocommit is disabled or not set (it is disabled by default), Redshift
            # will be in the middle of a transaction at exception time, and because of
            # the failure the current transaction will not accept any further queries.
diff --git a/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/retry.py b/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/retry.py
index 60f00f905d03..843117a51465 100644
--- a/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/retry.py
+++ b/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/retry.py
@@ -116,7 +116,7 @@ def call_and_maybe_retry(*args, **kwargs):
             while not self.__max_tries or tries < self.__max_tries:
                 try:
                     return f(*args, **kwargs)
-                except Exception as ex:  # pylint: disable=broad-except
+                except Exception as ex:
                     if self.__retry_if(ex) and (
                         tries < self.__max_tries - 1 or not self.__max_tries
                     ):
diff --git a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py
index f6fce7b1a671..567ff323fedd 100644
--- a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py
+++ b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py
@@ -314,7 +314,7 @@ def terminate(self, run_id):
                 cls=self.__class__,
             )
             return termination_result
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self._instance.report_engine_event(
                 message="Dagster Job was not terminated successfully; encountered error in delete_job",
                 pipeline_run=run,
diff --git a/python_modules/libraries/dagster-celery/dagster_celery/core_execution_loop.py b/python_modules/libraries/dagster-celery/dagster_celery/core_execution_loop.py
index 0978a5600f7b..c007b0eb50ea 100644
--- a/python_modules/libraries/dagster-celery/dagster_celery/core_execution_loop.py
+++ b/python_modules/libraries/dagster-celery/dagster_celery/core_execution_loop.py
@@ -87,7 +87,7 @@ def core_celery_execution_loop(pipeline_context, execution_plan, step_execution_
                         EngineEventData(marker_end=DELEGATE_MARKER),
                         step_handle=active_execution.get_step_by_key(step_key).handle,
                     )
-                except Exception:  # pylint: disable=broad-except
+                except Exception:
                     # We will want to do more to handle the exception here.. maybe subclass Task
                     # Certainly yield an engine or pipeline event
                     step_events = []
diff --git a/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py b/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py
index 757bd60a7856..7d4f1ca1278f 100644
--- a/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py
+++ b/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py
@@ -35,7 +35,7 @@ def rabbitmq():  # pylint: disable=redefined-outer-name
         subprocess.check_output(
             ["docker-compose", "-f", docker_compose_file, "rm", "-f", service_name],
         )
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         pass
    subprocess.check_output(["docker-compose", "-f", docker_compose_file, "up", "-d", service_name])
@@ -57,7 +57,7 @@ def rabbitmq():  # pylint: disable=redefined-outer-name
            subprocess.check_output(
                ["docker-compose", "-f", docker_compose_file, "rm", "-f", service_name]
            )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
            pass
diff --git a/python_modules/libraries/dagster-docker/dagster_docker/docker_executor.py b/python_modules/libraries/dagster-docker/dagster_docker/docker_executor.py
index 25a4aefe2934..c2304898b6f4 100644
--- a/python_modules/libraries/dagster-docker/dagster_docker/docker_executor.py
+++ b/python_modules/libraries/dagster-docker/dagster_docker/docker_executor.py
@@ -189,7 +189,7 @@ def check_step_health(self, step_handler_context: StepHandlerContext) -> List[Da
         try:
             container = client.containers.get(container_name)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             return [
                 DagsterEvent(
                     event_type_value=DagsterEventType.STEP_FAILURE.value,
@@ -208,7 +208,7 @@ def check_step_health(self, step_handler_context: StepHandlerContext) -> List[Da
        try:
            container_info = container.wait(timeout=0.1)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
            return [
                DagsterEvent(
                    event_type_value=DagsterEventType.STEP_FAILURE.value,
@@ -266,7 +266,7 @@ def terminate_step(self, step_handler_context: StepHandlerContext) -> List[Dagst
                )
            )
            container.stop()
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
            events.append(
                DagsterEvent(
                    event_type_value=DagsterEventType.ENGINE_EVENT.value,
diff --git a/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py b/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py
index 747dcb91ac63..72a82e6d34fb 100644
--- a/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py
+++ b/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py
@@ -175,7 +175,7 @@ def _get_container(self, run):
         try:
             return self._get_client().containers.get(container_id)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             return None
    def can_terminate(self, run_id):
diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s/client.py b/python_modules/libraries/dagster-k8s/dagster_k8s/client.py
index 406049ba9186..c390c673a4c7 100644
--- a/python_modules/libraries/dagster-k8s/dagster_k8s/client.py
+++ b/python_modules/libraries/dagster-k8s/dagster_k8s/client.py
@@ -315,13 +315,13 @@ def delete_job(
         errors = []
         try:
             self.batch_api.delete_namespaced_job(name=job_name, namespace=namespace)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             errors.append(e)
         for pod_name in pod_names:
             try:
                 self.core_api.delete_namespaced_pod(name=pod_name, namespace=namespace)
-            except Exception as e:  # pylint: disable=broad-except
+            except Exception as e:
                 errors.append(e)
         if len(errors) > 0:
diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py b/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py
index 2f55f4e0c4c8..74a1af7a0436 100644
--- a/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py
+++ b/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py
@@ -389,7 +389,7 @@ def terminate(self, run_id):
                 cls=self.__class__,
             )
             return termination_result
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self._instance.report_engine_event(
                 message="Run was not terminated successfully; encountered error in delete_job",
                 pipeline_run=run,
@@ -409,7 +409,7 @@ def check_run_worker_health(self, run: PipelineRun):
        )
        try:
            job = self._batch_api.read_namespaced_job(namespace=self.job_namespace, name=job_name)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
            return CheckRunHealthResult(
                WorkerStatus.UNKNOWN, str(serializable_error_info_from_exc_info(sys.exc_info()))
            )
diff --git a/python_modules/libraries/dagster-mlflow/dagster_mlflow/resources.py b/python_modules/libraries/dagster-mlflow/dagster_mlflow/resources.py
index e83d72ceb898..bf7a38daa941 100644
--- a/python_modules/libraries/dagster-mlflow/dagster_mlflow/resources.py
+++ b/python_modules/libraries/dagster-mlflow/dagster_mlflow/resources.py
@@ -162,7 +162,7 @@ def _start_run(self, **kwargs):
                 f"Starting a new mlflow run with id {run.info.run_id} "
                 f"in experiment {self.experiment_name}"
             )
-        except Exception as ex:  # pylint: disable=broad-except
+        except Exception as ex:
             run = mlflow.active_run()
             if "is already active" not in str(ex):
                 raise (ex)
diff --git a/python_modules/libraries/dagster-postgres/dagster_postgres/event_log/event_log.py b/python_modules/libraries/dagster-postgres/dagster_postgres/event_log/event_log.py
index bd3c486fbd62..e3228dad911c 100644
--- a/python_modules/libraries/dagster-postgres/dagster_postgres/event_log/event_log.py
+++ b/python_modules/libraries/dagster-postgres/dagster_postgres/event_log/event_log.py
@@ -296,7 +296,7 @@ def watcher_thread(
             if callback_with_cursor.start_cursor < index:
                 try:
                     callback_with_cursor.callback(dagster_event)
-                except Exception:  # pylint: disable=broad-except
+                except Exception:
                     logging.exception(
                         "Exception in callback for event watch on run %s.", run_id
                     )
diff --git a/python_modules/libraries/dagstermill/dagstermill/factory.py b/python_modules/libraries/dagstermill/dagstermill/factory.py
index a22c128cd88f..e7968580478a 100644
--- a/python_modules/libraries/dagstermill/dagstermill/factory.py
+++ b/python_modules/libraries/dagstermill/dagstermill/factory.py
@@ -209,7 +209,7 @@ def _t_fn(step_context, inputs):
                     log_output=True,
                 )
-            except Exception as ex:  # pylint: disable=broad-except
+            except Exception as ex:
                 step_execution_context.log.warn(
                     "Error when attempting to materialize executed notebook: {exc}".format(
                         exc=str(serializable_error_info_from_exc_info(sys.exc_info()))
@@ -260,7 +260,7 @@ def _t_fn(step_context, inputs):
                    ],
                )
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                # if file manager writing errors, e.g. file manager is not provided, we throw a warning
                # and fall back to the previously stored temp executed notebook.
                step_context.log.warning(
diff --git a/python_modules/libraries/dagstermill/dagstermill/manager.py b/python_modules/libraries/dagstermill/dagstermill/manager.py
index c4819277b539..1be2df518234 100644
--- a/python_modules/libraries/dagstermill/dagstermill/manager.py
+++ b/python_modules/libraries/dagstermill/dagstermill/manager.py
@@ -136,7 +136,7 @@ def reconstitute_pipeline_context(
         try:
             instance_ref = unpack_value(instance_ref_dict)
             instance = DagsterInstance.from_ref(instance_ref)
-        except Exception as err:  # pylint: disable=broad-except
+        except Exception as err:
             raise DagstermillError(
                 "Error when attempting to resolve DagsterInstance from serialized InstanceRef"
             ) from err