
Commit

Replace tmpdir with tmp_path (mlflow#8603)
* Replace tmpdir with tmp_path

Signed-off-by: harupy <hkawamura0130@gmail.com>

* Replace more tmpdirs

Signed-off-by: harupy <hkawamura0130@gmail.com>

* Fix tracking_uri_mock

Signed-off-by: harupy <hkawamura0130@gmail.com>

* Replace tmpdir with tmp_path

Signed-off-by: harupy <hkawamura0130@gmail.com>

---------

Signed-off-by: harupy <hkawamura0130@gmail.com>
harupy committed Jun 5, 2023
1 parent ae93914 commit 8c97133
Showing 41 changed files with 221 additions and 219 deletions.
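Every file below follows the same pattern: pytest's legacy `tmpdir` fixture (a `py.path.local` object with `.join()`/`.strpath` helpers) is replaced by the `tmp_path` fixture (a standard `pathlib.Path`). A minimal sketch of the before/after idiom, using hypothetical fixture names rather than code from this commit:

import os

import pytest


@pytest.fixture
def artifact_dir_old(tmpdir):  # legacy: tmpdir is a py.path.local
    return tmpdir.join("model").strpath  # .join()/.strpath are py.path-specific


@pytest.fixture
def artifact_dir_new(tmp_path):  # preferred: tmp_path is a pathlib.Path
    # os.path.join accepts path-like objects, so no explicit str() is needed
    return os.path.join(tmp_path, "model")
    # equivalent: str(tmp_path / "model")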
4 changes: 2 additions & 2 deletions tests/autologging/test_autologging_behaviors_integration.py
@@ -238,10 +238,10 @@ def mlflow_autolog():
 
 
 @pytest.mark.usefixtures(test_mode_off.__name__)
-def test_autolog_respects_silent_mode(tmpdir):
+def test_autolog_respects_silent_mode(tmp_path):
     # Use file-based experiment storage for this test. Otherwise, concurrent experiment creation in
     # multithreaded contexts may fail for other storage backends (e.g. SQLAlchemy)
-    mlflow.set_tracking_uri(str(tmpdir))
+    mlflow.set_tracking_uri(str(tmp_path))
     mlflow.set_experiment("test_experiment")
 
     og_showwarning = warnings.showwarning
20 changes: 10 additions & 10 deletions tests/catboost/test_catboost_model_export.py
@@ -78,13 +78,13 @@ def reg_model():
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return tmpdir.join("model").strpath
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def custom_env(tmpdir):
-    conda_env_path = os.path.join(tmpdir.strpath, "conda_env.yml")
+def custom_env(tmp_path):
+    conda_env_path = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env_path, additional_pip_deps=["catboost", "pytest"])
     return conda_env_path
 
@@ -211,11 +211,11 @@ def test_model_load_from_remote_uri_succeeds(reg_model, model_path, mock_s3_buck
     )
 
 
-def test_log_model(cb_model, tmpdir):
+def test_log_model(cb_model, tmp_path):
     model, inference_dataframe = cb_model
     with mlflow.start_run():
         artifact_path = "model"
-        conda_env = os.path.join(tmpdir.strpath, "conda_env.yaml")
+        conda_env = os.path.join(tmp_path, "conda_env.yaml")
         _mlflow_conda_env(conda_env, additional_pip_deps=["catboost"])
 
         model_info = mlflow.catboost.log_model(model, artifact_path, conda_env=conda_env)
@@ -236,11 +236,11 @@ def test_log_model(cb_model, tmpdir):
     assert os.path.exists(os.path.join(local_path, env_path))
 
 
-def test_log_model_calls_register_model(cb_model, tmpdir):
+def test_log_model_calls_register_model(cb_model, tmp_path):
     artifact_path = "model"
     registered_model_name = "registered_model"
     with mlflow.start_run() as run, mock.patch("mlflow.register_model") as register_model_mock:
-        conda_env_path = os.path.join(tmpdir.strpath, "conda_env.yaml")
+        conda_env_path = os.path.join(tmp_path, "conda_env.yaml")
         _mlflow_conda_env(conda_env_path, additional_pip_deps=["catboost"])
         mlflow.catboost.log_model(
             cb_model.model,
@@ -254,10 +254,10 @@ def test_log_model_calls_register_model(cb_model, tmpdir):
     )
 
 
-def test_log_model_no_registered_model_name(cb_model, tmpdir):
+def test_log_model_no_registered_model_name(cb_model, tmp_path):
     with mlflow.start_run(), mock.patch("mlflow.register_model") as register_model_mock:
         artifact_path = "model"
-        conda_env_path = os.path.join(tmpdir.strpath, "conda_env.yaml")
+        conda_env_path = os.path.join(tmp_path, "conda_env.yaml")
         _mlflow_conda_env(conda_env_path, additional_pip_deps=["catboost"])
         mlflow.catboost.log_model(cb_model.model, artifact_path, conda_env=conda_env_path)
         register_model_mock.assert_not_called()
6 changes: 3 additions & 3 deletions tests/conftest.py
@@ -29,13 +29,13 @@ def set_mock(obj, attr, mock):
 
 
 @pytest.fixture(autouse=True)
-def tracking_uri_mock(tmpdir, request):
+def tracking_uri_mock(tmp_path, request):
     try:
         if "notrackingurimock" not in request.keywords:
-            tracking_uri = path_to_local_sqlite_uri(os.path.join(tmpdir.strpath, "mlruns.sqlite"))
+            tracking_uri = path_to_local_sqlite_uri(os.path.join(tmp_path, "mlruns.sqlite"))
             mlflow.set_tracking_uri(tracking_uri)
             os.environ["MLFLOW_TRACKING_URI"] = tracking_uri
-        yield tmpdir
+        yield str(tmp_path)
     finally:
         mlflow.set_tracking_uri(None)
         if "notrackingurimock" not in request.keywords:
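Besides swapping the fixture argument, the hunk above changes what the autouse `tracking_uri_mock` fixture yields: `str(tmp_path)` instead of the `py.path.local` object, so tests that treat the yielded value as a plain string path keep working. A hypothetical consumer, sketched only to illustrate that assumption:

import os


def test_uses_tracking_dir(tracking_uri_mock):  # hypothetical test; fixture injected by name
    # The fixture now yields a str, not a py.path.local
    assert isinstance(tracking_uri_mock, str)
    scratch = os.path.join(tracking_uri_mock, "scratch")  # plain string path operations
    os.makedirs(scratch, exist_ok=True)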
4 changes: 2 additions & 2 deletions tests/evaluate/test_evaluation.py
@@ -1133,7 +1133,7 @@ def test_evaluate_env_manager_params(multiclass_logistic_regressor_model_uri, ir
 
 
 @pytest.mark.parametrize("env_manager", ["virtualenv", "conda"])
-def test_evaluate_restores_env(tmpdir, env_manager, iris_dataset):
+def test_evaluate_restores_env(tmp_path, env_manager, iris_dataset):
     class EnvRestoringTestModel(mlflow.pyfunc.PythonModel):
         def __init__(self):
             pass
@@ -1154,7 +1154,7 @@ def evaluate(self, *, model, model_type, dataset, run_id, evaluator_config, **kw
             y = model.predict(pd.DataFrame(dataset.features_data))
             return EvaluationResult(metrics={"test": y[0]}, artifacts={})
 
-    model_path = os.path.join(str(tmpdir), "model")
+    model_path = os.path.join(tmp_path, "model")
 
     mlflow.pyfunc.save_model(
         path=model_path,
8 changes: 4 additions & 4 deletions tests/fastai/test_fastai_model_export.py
@@ -50,13 +50,13 @@ def fastai_model():
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return os.path.join(str(tmpdir), "model")
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def fastai_custom_env(tmpdir):
-    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
+def fastai_custom_env(tmp_path):
+    conda_env = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env, additional_pip_deps=["fastai", "pytest"])
     return conda_env
 
8 changes: 4 additions & 4 deletions tests/gluon/test_gluon_model_export.py
@@ -46,13 +46,13 @@
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return os.path.join(tmpdir.strpath, "model")
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def gluon_custom_env(tmpdir):
-    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
+def gluon_custom_env(tmp_path):
+    conda_env = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env, additional_pip_deps=["mxnet", "pytest"])
     return conda_env
 
8 changes: 4 additions & 4 deletions tests/h2o/test_h2o_model_export.py
@@ -54,13 +54,13 @@ def h2o_iris_model():
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return os.path.join(str(tmpdir), "model")
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def h2o_custom_env(tmpdir):
-    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
+def h2o_custom_env(tmp_path):
+    conda_env = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env, additional_pip_deps=["h2o", "pytest"])
     return conda_env
 
8 changes: 4 additions & 4 deletions tests/lightgbm/test_lightgbm_model_export.py
@@ -67,13 +67,13 @@ def lgb_sklearn_model():
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return os.path.join(str(tmpdir), "model")
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def lgb_custom_env(tmpdir):
-    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
+def lgb_custom_env(tmp_path):
+    conda_env = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env, additional_pip_deps=["lightgbm", "pytest"])
     return conda_env
 
4 changes: 2 additions & 2 deletions tests/models/test_model.py
@@ -394,8 +394,8 @@ def test_model_uuid():
     assert m4.model_uuid is None
 
 
-def test_validate_schema(sklearn_knn_model, iris_data, tmpdir):
-    sk_model_path = os.path.join(str(tmpdir), "sk_model")
+def test_validate_schema(sklearn_knn_model, iris_data, tmp_path):
+    sk_model_path = os.path.join(tmp_path, "sk_model")
     X, y = iris_data
     signature = infer_signature(X, y)
     mlflow.sklearn.save_model(
4 changes: 2 additions & 2 deletions tests/models/test_signature.py
@@ -164,8 +164,8 @@ def test_set_signature_to_logged_model():
     assert model_info.signature == signature
 
 
-def test_set_signature_to_saved_model(tmpdir):
-    model_path = str(tmpdir)
+def test_set_signature_to_saved_model(tmp_path):
+    model_path = str(tmp_path)
     mlflow.sklearn.save_model(
         RandomForestRegressor(),
         model_path,
16 changes: 8 additions & 8 deletions tests/onnx/test_onnx_model_export.py
@@ -99,8 +99,8 @@ def model(dataset):
 
 
 @pytest.fixture
-def onnx_model(model, sample_input, tmpdir):
-    model_path = os.path.join(str(tmpdir), "torch_onnx")
+def onnx_model(model, sample_input, tmp_path):
+    model_path = os.path.join(tmp_path, "torch_onnx")
     dynamic_axes = {"input": {0: "batch"}}
     torch.onnx.export(
         model, sample_input, model_path, dynamic_axes=dynamic_axes, input_names=["input"]
@@ -154,8 +154,8 @@ def multi_tensor_model_prediction(multi_tensor_model, data):
 
 
 @pytest.fixture
-def multi_tensor_onnx_model(multi_tensor_model, sample_input, tmpdir):
-    model_path = os.path.join(str(tmpdir), "multi_tensor_onnx")
+def multi_tensor_onnx_model(multi_tensor_model, sample_input, tmp_path):
+    model_path = os.path.join(tmp_path, "multi_tensor_onnx")
     _sample_input = torch.split(sample_input, 2, 1)
     torch.onnx.export(
         multi_tensor_model,
@@ -226,13 +226,13 @@ def predicted_multiple_inputs(data_multiple_inputs):
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return os.path.join(tmpdir.strpath, "model")
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def onnx_custom_env(tmpdir):
-    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
+def onnx_custom_env(tmp_path):
+    conda_env = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env, additional_pip_deps=["onnx", "pytest", "torch"])
     return conda_env
 
24 changes: 12 additions & 12 deletions tests/paddle/test_paddle_model_export.py
@@ -94,13 +94,13 @@ def forward(self, inputs): # pylint: disable=arguments-differ
 
 
 @pytest.fixture
-def model_path(tmpdir):
-    return os.path.join(str(tmpdir), "model")
+def model_path(tmp_path):
+    return os.path.join(tmp_path, "model")
 
 
 @pytest.fixture
-def pd_custom_env(tmpdir):
-    conda_env = os.path.join(str(tmpdir), "conda_env.yml")
+def pd_custom_env(tmp_path):
+    conda_env = os.path.join(tmp_path, "conda_env.yml")
     _mlflow_conda_env(conda_env, additional_pip_deps=["paddle", "pytest"])
     return conda_env
 
@@ -141,11 +141,11 @@ def test_model_load_from_remote_uri_succeeds(pd_model, model_path, mock_s3_bucke
     )
 
 
-def test_model_log(pd_model, model_path, tmpdir):
+def test_model_log(pd_model, model_path, tmp_path):
     model = pd_model.model
     try:
         artifact_path = "model"
-        conda_env = os.path.join(tmpdir, "conda_env.yaml")
+        conda_env = os.path.join(tmp_path, "conda_env.yaml")
         _mlflow_conda_env(conda_env, additional_pip_deps=["paddle"])
 
         model_info = mlflow.paddle.log_model(
@@ -346,12 +346,12 @@ def test_model_built_in_high_level_api_load_from_remote_uri_succeeds(
     )
 
 
-def test_model_built_in_high_level_api_log(pd_model_built_in_high_level_api, model_path, tmpdir):
+def test_model_built_in_high_level_api_log(pd_model_built_in_high_level_api, model_path, tmp_path):
     model = pd_model_built_in_high_level_api.model
     test_dataset = pd_model_built_in_high_level_api.inference_dataframe
     try:
         artifact_path = "model"
-        conda_env = os.path.join(tmpdir, "conda_env.yaml")
+        conda_env = os.path.join(tmp_path, "conda_env.yaml")
         _mlflow_conda_env(conda_env, additional_pip_deps=["paddle"])
 
         mlflow.paddle.log_model(pd_model=model, artifact_path=artifact_path, conda_env=conda_env)
@@ -378,8 +378,8 @@ def test_model_built_in_high_level_api_log(pd_model_built_in_high_level_api, mod
 
 
 @pytest.fixture
-def model_retrain_path(tmpdir):
-    return os.path.join(str(tmpdir), "model_retrain")
+def model_retrain_path(tmp_path):
+    return os.path.join(tmp_path, "model_retrain")
 
 
 @pytest.mark.allow_infer_pip_requirements_fallback
@@ -432,14 +432,14 @@ def test_model_retrain_built_in_high_level_api(
 
 
 def test_log_model_built_in_high_level_api(
-    pd_model_built_in_high_level_api, model_path, tmpdir, get_dataset_built_in_high_level_api
+    pd_model_built_in_high_level_api, model_path, tmp_path, get_dataset_built_in_high_level_api
 ):
     model = pd_model_built_in_high_level_api.model
     test_dataset = get_dataset_built_in_high_level_api[1]
 
     try:
         artifact_path = "model"
-        conda_env = os.path.join(tmpdir, "conda_env.yaml")
+        conda_env = os.path.join(tmp_path, "conda_env.yaml")
         _mlflow_conda_env(conda_env, additional_pip_deps=["paddle"])
 
         mlflow.paddle.log_model(
12 changes: 8 additions & 4 deletions tests/projects/test_docker_projects.py
@@ -32,7 +32,7 @@ def _build_uri(base_uri, subdirectory):
 
 @pytest.mark.parametrize("use_start_run", map(str, [0, 1]))
 def test_docker_project_execution(
-    use_start_run, tmpdir, docker_example_base_image
+    use_start_run, docker_example_base_image
 ): # pylint: disable=unused-argument
     expected_params = {"use_start_run": use_start_run}
     submitted_run = mlflow.projects.run(
@@ -76,7 +76,7 @@ def test_docker_project_execution(
 
 
 def test_docker_project_execution_async_docker_args(
-    tmpdir, docker_example_base_image
+    docker_example_base_image,
 ): # pylint: disable=unused-argument
     submitted_run = mlflow.projects.run(
         TEST_DOCKER_PROJECT_DIR,
@@ -106,15 +106,19 @@ def test_docker_project_execution_async_docker_args(
 )
 @mock.patch("databricks_cli.configure.provider.ProfileConfigProvider")
 def test_docker_project_tracking_uri_propagation(
-    ProfileConfigProvider, tmpdir, tracking_uri, expected_command_segment, docker_example_base_image
+    ProfileConfigProvider,
+    tmp_path,
+    tracking_uri,
+    expected_command_segment,
+    docker_example_base_image,
 ): # pylint: disable=unused-argument
     mock_provider = mock.MagicMock()
     mock_provider.get_config.return_value = DatabricksConfig.from_password(
         "host", "user", "pass", insecure=True
     )
     ProfileConfigProvider.return_value = mock_provider
     # Create and mock local tracking directory
-    local_tracking_dir = os.path.join(tmpdir.strpath, "mlruns")
+    local_tracking_dir = os.path.join(tmp_path, "mlruns")
     if tracking_uri is None:
         tracking_uri = local_tracking_dir
     old_uri = mlflow.get_tracking_uri()
2 changes: 1 addition & 1 deletion tests/projects/test_projects.py
@@ -217,7 +217,7 @@ def test_run(use_start_run):
     assert tags[MLFLOW_PROJECT_ENTRY_POINT] == "test_tracking"
 
 
-def test_run_with_parent(tmpdir): # pylint: disable=unused-argument
+def test_run_with_parent():
     """Verify that if we are in a nested run, mlflow.projects.run() will have a parent_run_id."""
     with mlflow.start_run():
         parent_run_id = mlflow.active_run().info.run_id
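Here `tmpdir` was only an unused argument, so it is dropped outright along with the `# pylint: disable=unused-argument` suppression. For context, a sketch of the parent/child-run relationship the docstring above refers to (illustrative only; the real test goes through mlflow.projects.run()):

import mlflow


def test_nested_run_is_linked_to_parent():  # hypothetical test, not part of this commit
    with mlflow.start_run() as parent:
        with mlflow.start_run(nested=True) as child:
            # MLflow records the parent via the mlflow.parentRunId tag on the child run
            assert child.data.tags.get("mlflow.parentRunId") == parent.info.run_id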
