Traceback (most recent call last):
  File "/tmp/spark-2b49f4a9-488f-4214-b42b-3d3332d170ad/emr_serverless_main.py", line 21, in <module>
    main(sys.argv[1], sys.argv[2])  # type: ignore[no-untyped-call]
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster_aws/emr/emr_step_main.py", line 51, in main
    list(run_step_from_ref(step_run_ref, instance))
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/execution/plan/external_step.py", line 240, in run_step_from_ref
    step_context = step_run_ref_to_step_context(step_run_ref, instance)
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/execution/plan/external_step.py", line 199, in step_run_ref_to_step_context
    execution_plan = create_execution_plan(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/execution/api.py", line 939, in create_execution_plan
    pipeline_def = pipeline.get_definition()
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/reconstruct.py", line 229, in get_definition
    return self.repository.get_definition().get_maybe_subset_job_def(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/reconstruct.py", line 117, in get_definition
    return repository_def_from_pointer(self.pointer, self.repository_load_data)
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/reconstruct.py", line 782, in repository_def_from_pointer
    target = def_from_pointer(pointer)
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/reconstruct.py", line 672, in def_from_pointer
    target = pointer.load_target()
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/code_pointer.py", line 224, in load_target
    module = load_python_module(self.module, self.working_directory)
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/code_pointer.py", line 138, in load_python_module
    return importlib.import_module(module_name)
  File "/home/hadoop/environment/lib/python3.10/importlib/__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
  File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
  File "<frozen importlib._bootstrap>", line 1006, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 688, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 883, in exec_module
  File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
  File "/tmp/spark-2b49f4a9-488f-4214-b42b-3d3332d170ad/code.zip/binance/definitions.py", line 11, in <module>
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/definitions_class.py", line 253, in __init__
    self._created_pending_or_normal_repo = _create_repository_using_definitions_args(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/definitions_class.py", line 142, in _create_repository_using_definitions_args
    def created_repo():
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/decorators/repository_decorator.py", line 129, in __call__
    else CachingRepositoryData.from_list(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/repository_definition/repository_data.py", line 468, in from_list
    return build_caching_repository_data_from_list(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/repository_definition/repository_data_builder.py", line 218, in build_caching_repository_data_from_list
    resolved_job = unresolved_job_def.resolve(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/unresolved_asset_job_definition.py", line 193, in resolve
    return build_asset_selection_job(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/asset_layer.py", line 786, in build_asset_selection_job
    asset_job = build_assets_job(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/assets_job.py", line 205, in build_assets_job
    return graph.to_job(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/graph_definition.py", line 612, in to_job
    return JobDefinition(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/job_definition.py", line 204, in __init__
    config_mapping = _config_mapping_with_default_value(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_core/definitions/job_definition.py", line 966, in _config_mapping_with_default_value
    updated_fields[name] = Field(
  File "/home/hadoop/environment/lib/python3.10/site-packages/dagster/_config/field.py", line 333, in __init__
    raise DagsterInvalidConfigError(
dagster._core.errors.DagsterInvalidConfigError: Invalid default_value for Field.
    Error 1: Missing required config entries ['application_id', 'execution_role_arn', 'region_name', 'staging_bucket'] at path root:pyspark_step_launcher:config. Sample config for missing entries: {'application_id': '...', 'execution_role_arn': '...', 'region_name': '...', 'staging_bucket': '...'}