RFC: Remove broad-except and bare-except lint warnings (#5492)
Summary:
These warnings are extremely low-signal (especially in a library that does general exception handling); I don't think I've ever changed my code as a result of seeing one of them pop up. I claim that we should remove them.
gibsondan committed Nov 2, 2021
1 parent 7e97d35 commit 6971d50
Showing 59 changed files with 90 additions and 91 deletions.
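
For context, the pattern these checks keep flagging is deliberate throughout the framework: catch whatever user code or a subprocess throws, capture it as serializable error info, and keep the surrounding loop or server alive. A minimal sketch of that pattern (simplified and with illustrative names; the helper below is only a stand-in for Dagster's real serializable_error_info_from_exc_info):

import logging
import sys
import traceback


def serializable_error_info_from_exc_info(exc_info):
    # Stand-in for Dagster's helper of the same name: turn the active
    # exception into plain data so it can be logged, stored, or reported.
    exc_type, exc_value, exc_tb = exc_info
    return {
        "cls_name": exc_type.__name__,
        "message": str(exc_value),
        "stack": traceback.format_exception(exc_type, exc_value, exc_tb),
    }


def run_iteration(jobs):
    # Run each job; a failure in one must not take down the daemon.
    for job in jobs:
        try:
            job()
        except Exception:  # previously needed `# pylint: disable=broad-except`
            error_info = serializable_error_info_from_exc_info(sys.exc_info())
            logging.error("Caught error:\n%s", error_info)

Pylint flags every such except Exception: block, so the codebase had accumulated dozens of inline disables with no corresponding change in behavior.
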
2 changes: 1 addition & 1 deletion .pylintrc
@@ -16,7 +16,7 @@
# W1201, W1202 disable log format warning. False positives (I think)
# W0707 disable raise-missing-from which we cant use because py2 back compat

disable=C,R,duplicate-code,W0511,W1201,W1202,W0707,no-init
disable=C,R,duplicate-code,W0511,W1201,W1202,W0707,no-init,broad-except,bare-except

# See: https://github.com/getsentry/responses/issues/74
[TYPECHECK]
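
For reference, bare-except and broad-except correspond to pylint's W0702 and W0703 checks (as coded at the time). With them added to the global disable list above, helpers like the health checks in the test conftests lint cleanly without inline suppressions, which is all the remaining files in this commit change. A rough sketch, loosely modeled on the is_postgres_running helper below (the docker invocation is illustrative):

import subprocess


def is_postgres_running() -> bool:
    # Best-effort health check: any failure simply means "not running".
    try:
        output = subprocess.check_output(["docker", "ps", "--filter", "name=postgres"])
        lines = output.decode("utf-8").split("\n")
        # header, one line for the container, trailing newline
        return len(lines) == 3
    except:  # bare except is fine now; no inline pylint disable required
        return False

The bare except here is intentional: the check should report "not running" on any failure, including a missing docker binary, rather than crash test setup.
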
4 changes: 2 additions & 2 deletions examples/airline_demo/airline_demo_tests/conftest.py
@@ -27,7 +27,7 @@ def is_postgres_running():

# header, one line for container, trailing \n
return len(lines) == 3
except: # pylint: disable=bare-except
except:
return False


@@ -67,7 +67,7 @@ def postgres(pg_hostname): # pylint: disable=redefined-outer-name
try:
subprocess.check_output(["docker-compose", "stop", "test-postgres-db-airline"])
subprocess.check_output(["docker-compose", "rm", "-f", "test-postgres-db-airline"])
except Exception: # pylint: disable=broad-except
except Exception:
pass
subprocess.check_output(["docker-compose", "up", "-d", "test-postgres-db-airline"])

4 changes: 2 additions & 2 deletions examples/dbt_example/dbt_example_tests/conftest.py
@@ -27,7 +27,7 @@ def is_postgres_running():

# header, one line for container, trailing \n
return len(lines) == 3
except: # pylint: disable=bare-except
except:
return False


@@ -65,7 +65,7 @@ def postgres(pg_hostname): # pylint: disable=redefined-outer-name
subprocess.check_output(
["docker-compose", "rm", "-f", "dbt_example_postgresql"]
)
except Exception: # pylint: disable=broad-except
except Exception:
pass
subprocess.check_output(["docker-compose", "up", "-d", "dbt_example_postgresql"])

@@ -152,7 +152,7 @@ def local_port_forward_postgres(namespace):
)
conn.close()
break
except: # pylint: disable=bare-except, broad-except
except:
time.sleep(1)
continue

@@ -363,7 +363,7 @@ def check_export_runs(instance):

try:
export_run(instance, run, output_file)
except Exception as e: # pylint: disable=broad-except
except Exception as e:
print(f"Hit an error exporting dagster-debug {output_file}: {e}")
continue

2 changes: 1 addition & 1 deletion python_modules/dagit/dagit/graphql.py
@@ -227,7 +227,7 @@ async def _handle_async_results(results: AsyncGenerator, operation_id: str, webs
payload["errors"] = [format_graphql_error(err) for err in result.errors]

await _send_message(websocket, GraphQLWS.DATA, payload, operation_id)
except Exception as error: # pylint: disable=broad-except
except Exception as error:
if not isinstance(error, GraphQLError):
error = GraphQLError(str(error))

4 changes: 2 additions & 2 deletions python_modules/dagit/dagit/telemetry.py
@@ -64,7 +64,7 @@ def upload_logs(stop_event, raise_errors=False):
in_progress = False

stop_event.wait(600) # Sleep for 10 minutes
except Exception: # pylint: disable=broad-except
except Exception:
if raise_errors:
raise

@@ -107,6 +107,6 @@ def _upload_logs(dagster_log_dir, log_size, dagster_log_queue_dir, raise_errors)
if success:
os.remove(curr_full_path)

except Exception: # pylint: disable=broad-except
except Exception:
if raise_errors:
raise
@@ -59,7 +59,7 @@ def get_subset_external_pipeline(context, selector):

try:
subset_result = repository_location.get_subset_external_pipeline_result(selector)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
raise UserFacingGraphQLError(
GrapheneInvalidSubsetError(
@@ -34,7 +34,7 @@ def _fn(*args, **kwargs):
return fn(*args, **kwargs)
except UserFacingGraphQLError as de_exception:
return de_exception.error
except Exception: # pylint: disable=broad-except
except Exception:
return GraphenePythonError(serializable_error_info_from_exc_info(sys.exc_info()))

return _fn
@@ -182,7 +182,7 @@ def resolve_evaluationResult(self, graphene_info):
schedule_name=external_schedule.name,
scheduled_execution_time=schedule_time,
)
except Exception: # pylint: disable=broad-except
except Exception:
schedule_data = serializable_error_info_from_exc_info(sys.exc_info())

return GrapheneTickEvaluation(schedule_data)
4 changes: 2 additions & 2 deletions python_modules/dagster/dagster/cli/pipeline.py
@@ -913,7 +913,7 @@ def _execute_backfill_command_at_location(
repo_handle,
partition_set_name,
)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
raise DagsterBackfillFailedError(
"Failure fetching partition names: {error_message}".format(
@@ -958,7 +958,7 @@ def _execute_backfill_command_at_location(
partition_names=partition_names,
)
)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
instance.add_backfill(
backfill_job.with_status(BulkActionStatus.FAILED).with_error(error_info)
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/cli/sensor.py
@@ -284,7 +284,7 @@ def execute_preview_command(
last_run_key,
cursor,
)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
print_fn(
"Failed to resolve sensor for {sensor_name} : {error_info}".format(
4 changes: 2 additions & 2 deletions python_modules/dagster/dagster/core/definitions/inference.py
@@ -32,7 +32,7 @@ def _infer_input_description_from_docstring(fn: Callable) -> Dict[str, Optional[
try:
docstring = parse(doc_str)
return {p.arg_name: p.description for p in docstring.params}
except Exception: # pylint: disable=broad-except
except Exception:
return {}


@@ -48,7 +48,7 @@ def _infer_output_description_from_docstring(fn: Callable) -> Optional[str]:
return None

return docstring.returns.description
except Exception: # pylint: disable=broad-except
except Exception:
return None


@@ -340,7 +340,7 @@ def make_bar_job():
and inspect.getmodule(target).__name__ != "__main__"
):
return ReconstructablePipeline.for_module(target.__module__, target.__name__)
except: # pylint: disable=bare-except
except:
pass

python_file = get_python_file_from_target(target)
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/core/definitions/utils.py
@@ -91,7 +91,7 @@ def validate_tags(tags: Optional[Dict[str, Any]]) -> Dict[str, Any]:
)

valid = seven.json.loads(str_val) == value
except Exception: # pylint: disable=broad-except
except Exception:
pass

if not valid:
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/core/execution/api.py
@@ -787,7 +787,7 @@ def pipeline_execution_iterator(
pipeline_canceled_info = serializable_error_info_from_exc_info(sys.exc_info())
if pipeline_context.raise_on_error:
raise
except Exception: # pylint: disable=broad-except
except Exception:
pipeline_exception_info = serializable_error_info_from_exc_info(sys.exc_info())
if pipeline_context.raise_on_error:
raise # finally block will run before this is re-raised
@@ -284,7 +284,7 @@ def _dagster_event_sequence_for_step(step_context: StepExecutionContext) -> Iter
raise dagster_error

# case (6) in top comment
except Exception as unexpected_exception: # pylint: disable=broad-except
except Exception as unexpected_exception:
step_context.capture_step_exception(unexpected_exception)
yield step_failure_event_from_exc_info(
step_context,
@@ -66,7 +66,6 @@ def _execute_command_in_child_process(event_queue, command):
event_queue.put(step_event)
event_queue.put(ChildProcessDoneEvent(pid=pid))

# pylint: disable=broad-except
except (
Exception,
KeyboardInterrupt,
@@ -181,7 +181,7 @@ def _get_grpc_endpoint(self, repository_location_origin):
startup_timeout=self._startup_timeout,
)
self._all_processes.append(server_process)
except Exception: # pylint: disable=broad-except
except Exception:
server_process = serializable_error_info_from_exc_info(sys.exc_info())
new_server_id = None

@@ -96,7 +96,7 @@ def store_event(self, event):
for handler in handlers:
try:
handler(event)
except Exception: # pylint: disable=broad-except
except Exception:
logging.exception("Exception in callback for event watch on run %s.", run_id)

def delete_events(self, run_id):
@@ -162,7 +162,7 @@ def on_modified(self):
status = None
try:
status = callback(event)
except Exception: # pylint: disable=broad-except
except Exception:
logging.exception("Exception in callback for event watch on run %s.", run_id)

if (
@@ -417,7 +417,7 @@ def _process_log(self):
status = None
try:
status = self._cb(event)
except Exception: # pylint: disable=broad-except
except Exception:
logging.exception("Exception in callback for event watch on run %s.", self._run_id)

if (
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/core/storage/sql.py
@@ -68,7 +68,7 @@ def handle_schema_errors(conn, alembic_config, msg=None):
db_revision, head_revision = check_alembic_revision(alembic_config, conn)
# If exceptions were raised during the revision check, we want to swallow them and
# allow the original exception to fall through
except Exception: # pylint: disable=broad-except
except Exception:
pass

if db_revision != head_revision:
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/core/telemetry.py
@@ -324,7 +324,7 @@ def _set_telemetry_instance_id():
with open(telemetry_id_path, "w") as telemetry_id_file:
yaml.dump({INSTANCE_ID_STR: instance_id}, telemetry_id_file, default_flow_style=False)
return instance_id
except Exception: # pylint: disable=broad-except
except Exception:
return "<<unable_to_write_instance_id>>"


2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/core/workspace/context.py
@@ -528,7 +528,7 @@ def _load_location(self, origin):
error = None
try:
location = self._create_location_from_origin(origin)
except Exception: # pylint: disable=broad-except
except Exception:
error = serializable_error_info_from_exc_info(sys.exc_info())
warnings.warn(
"Error loading repository location {location_name}:{error_string}".format(
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/daemon/backfill.py
@@ -105,7 +105,7 @@ def execute_backfill_iteration(instance, workspace, logger, debug_crash_flags=No
)
instance.update_backfill(backfill_job.with_status(BulkActionStatus.COMPLETED))
yield
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
instance.update_backfill(
backfill_job.with_status(BulkActionStatus.FAILED).with_error(error_info)
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/daemon/controller.py
@@ -186,7 +186,7 @@ def _daemon_heartbeat_healthy(self, daemon_type):
if is_healthy:
self._last_healthy_heartbeat_times[daemon_type] = now
return is_healthy
except Exception: # pylint: disable=broad-except
except Exception:
self._logger.warning(
"Error attempting to check {daemon_type} heartbeat:".format(
daemon_type=daemon_type,
6 changes: 3 additions & 3 deletions python_modules/dagster/dagster/daemon/daemon.py
@@ -102,7 +102,7 @@ def run_loop(
heartbeat_interval_seconds,
error_interval_seconds,
)
except Exception: # pylint: disable=broad-except
except Exception:
self._logger.error(
"Failed to add heartbeat: \n{}".format(
serializable_error_info_from_exc_info(sys.exc_info())
@@ -135,7 +135,7 @@ def _run_iteration(
self._errors.appendleft((result, pendulum.now("UTC")))
except StopIteration:
break
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
self._logger.error("Caught error:\n{}".format(error_info))
self._errors.appendleft((error_info, pendulum.now("UTC")))
@@ -148,7 +148,7 @@ def _run_iteration(
heartbeat_interval_seconds,
error_interval_seconds,
)
except Exception: # pylint: disable=broad-except
except Exception:
self._logger.error(
"Failed to add heartbeat: \n{}".format(
serializable_error_info_from_exc_info(sys.exc_info())
@@ -88,7 +88,7 @@ def execute_monitoring_iteration(instance, workspace, logger, _debug_crash_flags
pass
else:
check.invariant(False, f"Unexpected run status: {run.status}")
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
logger.error(f"Hit error while monitoring run {run.run_id}: " f"{str(error_info)}")
yield error_info
@@ -147,7 +147,7 @@ def run_iteration(self, instance, workspace):

try:
self._dequeue_run(instance, run, workspace)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())

message = (
4 changes: 2 additions & 2 deletions python_modules/dagster/dagster/daemon/sensor.py
@@ -237,7 +237,7 @@ def execute_sensor_iteration(
job_state,
sensor_debug_crash_flags,
)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
logger.error(
"Sensor daemon caught an error for sensor {sensor_name} : {error_info}".format(
@@ -365,7 +365,7 @@ def _evaluate_sensor(
run_id=run.run_id, sensor_name=external_sensor.name
)
)
except Exception: # pylint: disable=broad-except
except Exception:
error_info = serializable_error_info_from_exc_info(sys.exc_info())
context.logger.error(
f"Run {run.run_id} created successfully but failed to launch: " f"{str(error_info)}"
2 changes: 1 addition & 1 deletion python_modules/dagster/dagster/grpc/client.py
@@ -363,7 +363,7 @@ def start_run(self, execute_run_args):
)
return res.serialized_start_run_result

except Exception: # pylint: disable=bare-except
except Exception:
pipeline_run = instance.get_run_by_id(execute_run_args.pipeline_run_id)
instance.report_engine_event(
message="Unexpected error in IPC client",
8 changes: 4 additions & 4 deletions python_modules/dagster/dagster/grpc/impl.py
@@ -73,7 +73,7 @@ def core_execute_run(recon_pipeline, pipeline_run, instance, resume_from_failure
# try to load the pipeline definition early
try:
recon_pipeline.get_definition()
except Exception: # pylint: disable=broad-except
except Exception:
yield instance.report_engine_event(
"Could not load pipeline definition.",
pipeline_run,
@@ -92,7 +92,7 @@ def core_execute_run(recon_pipeline, pipeline_run, instance, resume_from_failure
message="Run execution terminated by interrupt",
pipeline_run=pipeline_run,
)
except Exception: # pylint: disable=broad-except
except Exception:
yield instance.report_engine_event(
"An exception was thrown during execution that is likely a framework error, "
"rather than an error in user code.",
@@ -128,7 +128,7 @@ def _run_in_subprocess(

pid = os.getpid()

except: # pylint: disable=bare-except
except:
serializable_error_info = serializable_error_info_from_exc_info(sys.exc_info())
event = IPCErrorMessage(
serializable_error_info=serializable_error_info,
@@ -357,7 +357,7 @@ def get_external_execution_plan_snapshot(recon_pipeline, args):
),
args.pipeline_snapshot_id,
)
except: # pylint: disable=bare-except
except:
return ExecutionPlanSnapshotErrorData(
error=serializable_error_info_from_exc_info(sys.exc_info())
)
