[MLflow] Removing backwards compatible code in new release #11888

Merged · 1 commit · May 13, 2024
37 changes: 1 addition & 36 deletions mlflow/langchain/__init__.py
@@ -36,7 +36,6 @@
     _update_langchain_model_config,
     patched_inference,
 )
-from mlflow.langchain._rag_utils import _CODE_CONFIG, _CODE_PATH, _set_config_path
 from mlflow.langchain.databricks_dependencies import (
     _DATABRICKS_DEPENDENCY_KEY,
     _detect_databricks_dependencies,
@@ -85,7 +84,6 @@
 )
 from mlflow.utils.file_utils import get_total_file_size, write_to
 from mlflow.utils.model_utils import (
-    FLAVOR_CONFIG_CODE,
     _add_code_from_conf_to_system_path,
     _get_flavor_configuration,
     _validate_and_copy_code_paths,
@@ -288,11 +286,6 @@ def load_retriever(persist_directory):
                 f"Model config path '{model_config}' provided is not a valid file path. "
                 "Please provide a valid model configuration."
             )
-    elif not model_config:
-        # If the model_config is not provided we fallback to getting the config path
-        # from code_paths so that is backwards compatible.
-        if code_paths and len(code_paths) == 1 and os.path.exists(code_paths[0]):
-            model_config_path = code_paths[0]
 
     lc_model = (
         _load_model_code_path(model_code_path, model_config_path)
@@ -362,7 +355,6 @@ def load_retriever(persist_directory):
     # would be used in the model. We set the code_path here so it can be set
     # globally when the model is loaded with the local path. So the consumer
     # can use that path instead of the config.yml path when the model is loaded
-    # TODO: what if model_config is not a string / file path?
     flavor_conf = (
         {MODEL_CONFIG: model_config_path, MODEL_CODE_PATH: model_code_path}
         if model_config_path
@@ -873,27 +865,6 @@ def _load_model_from_local_fs(local_model_path):
             os.path.basename(flavor_code_path),
         )
 
         return _load_model_code_path(code_path, config_path)
-    # Code for backwards compatibility, relies on RAG utils - remove in the future
-    elif _CODE_CONFIG in flavor_conf:
-        path = flavor_conf.get(_CODE_CONFIG)
-        flavor_code_config = flavor_conf.get(FLAVOR_CONFIG_CODE)
-        if path is not None:
-            config_path = os.path.join(
-                local_model_path,
-                flavor_code_config,
-                os.path.basename(path),
-            )
-        else:
-            config_path = None
-
-        flavor_code_path = flavor_conf.get(_CODE_PATH, "chain.py")
-        code_path = os.path.join(
-            local_model_path,
-            flavor_code_config,
-            os.path.basename(flavor_code_path),
-        )
-
-        return _load_model_code_path(code_path, config_path)
     else:
         _add_code_from_conf_to_system_path(local_model_path, flavor_conf)
@@ -937,14 +908,10 @@ def _config_path_context(config_path: Optional[str] = None):
         config_path = ""
 
     _set_model_config(config_path)
-    # set rag utils global for backwards compatibility
-    _set_config_path(config_path)
     try:
         yield
     finally:
         _set_model_config(None)
-        # unset rag utils global for backwards compatibility
-        _set_config_path(None)
 
 
 # In the Python's module caching mechanism, which by default, prevents the
@@ -972,9 +939,7 @@ def _load_model_code_path(code_path: str, config_path: Optional[str] = None):
     except ImportError as e:
         raise mlflow.MlflowException("Failed to import LangChain model.") from e
 
-    return (
-        mlflow.models.model.__mlflow_model__ or mlflow.langchain._rag_utils.__databricks_rag_chain__
-    )
+    return mlflow.models.model.__mlflow_model__
 
 
 @experimental
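With the RAG-utils fallback gone, `_load_model_code_path` only returns whatever the executed chain file registered through `mlflow.models.set_model`; the `__databricks_rag_chain__` global is no longer consulted. A minimal sketch of such a model-as-code file, assuming a LangChain LCEL chain and a hypothetical config key `llm_model` (neither is part of this PR):

# chain.py -- illustrative model-as-code file, not taken from this PR
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

from mlflow.models import ModelConfig, set_model

# Resolve configuration: at load time the config logged via `model_config`
# takes precedence; development_config only applies during local development.
config = ModelConfig(development_config="config.yml")

prompt = ChatPromptTemplate.from_template("Answer the question: {question}")
llm = ChatOpenAI(model=config.get("llm_model"))  # "llm_model" is an assumed key
chain = prompt | llm | StrOutputParser()

# Register the chain so that executing this file sets
# mlflow.models.model.__mlflow_model__, which _load_model_code_path now returns.
set_model(chain)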
32 changes: 0 additions & 32 deletions mlflow/langchain/_rag_utils.py

This file was deleted.

7 changes: 0 additions & 7 deletions mlflow/models/model_config.py
@@ -3,8 +3,6 @@
 
 import yaml
 
-import mlflow
-
 __mlflow_model_config__ = None
 
 
@@ -17,9 +15,6 @@ class ModelConfig:
 
     def __init__(self, *, development_config: Optional[Union[str, Dict[str, Any]]] = None):
         config = globals().get("__mlflow_model_config__", None)
-        # backwards compatibility
-        rag_config = getattr(mlflow.langchain._rag_utils, "__databricks_rag_config_path__", None)
-
         # Here mlflow_model_config have 3 states:
         # 1. None, this means if the mlflow_model_config is None, use development_config if
         #    available
@@ -29,8 +24,6 @@ def __init__(self, *, development_config: Optional[Union[str, Dict[str, Any]]] =
         #    model so use that path
         if config is not None:
             self.config = config
-        elif rag_config is not None:
-            self.config = rag_config
         else:
             self.config = development_config
 
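After this change, `ModelConfig` resolves its source from only two places: the module-level `__mlflow_model_config__` set when a logged model is loaded, or the caller-supplied `development_config`; the `__databricks_rag_config_path__` middle step is gone. A rough illustration, assuming dict-valued configs are accepted as the `Union[str, Dict[str, Any]]` annotation suggests:

from mlflow.models import ModelConfig

# During local development no global config has been set yet, so the
# development_config supplied here is used directly.
dev_config = ModelConfig(development_config={"temperature": 0.1})
print(dev_config.get("temperature"))  # -> 0.1

# When a logged model is loaded, mlflow.langchain calls _set_model_config(...)
# (see _config_path_context above), which populates __mlflow_model_config__;
# that value then wins over development_config. The former RAG-utils global
# no longer participates in this resolution.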
4 changes: 2 additions & 2 deletions tests/langchain/test_langchain_model_export.py
@@ -2491,7 +2491,7 @@ def test_save_load_chain_as_code_with_different_names(tmp_path, chain_model_signature):
             artifact_path="model_path",
             signature=chain_model_signature,
             input_example=input_example,
-            code_paths=["tests/langchain/config.yml"],
+            model_config="tests/langchain/config.yml",
         )
 
     loaded_model = mlflow.langchain.load_model(model_info.model_uri)
@@ -2549,7 +2549,7 @@ def test_save_load_chain_as_code_multiple_times(tmp_path, chain_model_signature):
             artifact_path="model_path",
             signature=chain_model_signature,
             input_example=input_example,
-            code_paths=[new_config_file],
+            model_config=new_config_file,
         )
 
     loaded_model = mlflow.langchain.load_model(model_info.model_uri)
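The test updates capture the caller-facing change: the chain's configuration file is now passed explicitly through `model_config` when logging, since the single-entry `code_paths` fallback was removed above. A hedged sketch of the new calling convention (the chain file path is illustrative; `lc_model` and `artifact_path` follow the usual `mlflow.langchain.log_model` signature):

import mlflow

# Log a chain-as-code model: the chain lives in a separate Python file that
# calls mlflow.models.set_model(...), and its YAML configuration is supplied
# via model_config rather than being smuggled in through code_paths.
with mlflow.start_run():
    model_info = mlflow.langchain.log_model(
        lc_model="chain.py",  # hypothetical path to the model-as-code file
        artifact_path="model_path",
        model_config="tests/langchain/config.yml",
    )

loaded_model = mlflow.langchain.load_model(model_info.model_uri)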