"""
The ``mlflow.tensorflow`` module provides an API for logging and loading TensorFlow models.
This module exports TensorFlow models with the following flavors:
TensorFlow (native) format
This is the main flavor that can be loaded back into TensorFlow.
:py:mod:`mlflow.pyfunc`
Produced for use by generic pyfunc-based deployment tools and batch inference.
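
A minimal round trip might look like this (an illustrative sketch; it assumes a trivial
Keras model):

.. code-block:: python

    import mlflow
    import tensorflow as tf

    model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
    model.compile(optimizer="adam", loss="mse")

    with mlflow.start_run():
        model_info = mlflow.tensorflow.log_model(model, "model")

    loaded = mlflow.tensorflow.load_model(model_info.model_uri)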
"""
import importlib
import logging
import os
import shutil
import tempfile
from typing import Any, Dict, NamedTuple, Optional

import numpy as np
import pandas
import yaml
from packaging.version import Version

import mlflow
from mlflow import pyfunc
from mlflow.data.code_dataset_source import CodeDatasetSource
from mlflow.data.numpy_dataset import from_numpy
from mlflow.data.tensorflow_dataset import from_tensorflow
from mlflow.exceptions import INVALID_PARAMETER_VALUE, MlflowException
from mlflow.models import Model, ModelInputExample, ModelSignature, infer_signature
from mlflow.models.model import MLMODEL_FILE_NAME
from mlflow.models.signature import _infer_signature_from_input_example
from mlflow.models.utils import _save_example
from mlflow.tensorflow.callback import MlflowCallback, MlflowModelCheckpointCallback  # noqa: F401
from mlflow.tracking._model_registry import DEFAULT_AWAIT_MAX_SLEEP_SECONDS
from mlflow.tracking.artifact_utils import _download_artifact_from_uri
from mlflow.tracking.context import registry as context_registry
from mlflow.tracking.fluent import _shut_down_async_logging
from mlflow.types.schema import TensorSpec
from mlflow.utils import is_iterator
from mlflow.utils.autologging_utils import (
    PatchFunction,
    autologging_integration,
    get_autologging_config,
    log_fn_args_as_params,
    picklable_exception_safe_function,
    resolve_input_example_and_signature,
    safe_patch,
)
from mlflow.utils.checkpoint_utils import (
    _WEIGHT_ONLY_CHECKPOINT_SUFFIX,
    download_checkpoint_artifact,
)
from mlflow.utils.docstring_utils import LOG_MODEL_PARAM_DOCS, format_docstring
from mlflow.utils.environment import (
    _CONDA_ENV_FILE_NAME,
    _CONSTRAINTS_FILE_NAME,
    _PYTHON_ENV_FILE_NAME,
    _REQUIREMENTS_FILE_NAME,
    _mlflow_conda_env,
    _process_conda_env,
    _process_pip_requirements,
    _PythonEnv,
    _validate_env_arguments,
)
from mlflow.utils.file_utils import TempDir, get_total_file_size, write_to
from mlflow.utils.model_utils import (
    _add_code_from_conf_to_system_path,
    _get_flavor_configuration,
    _validate_and_copy_code_paths,
    _validate_and_prepare_target_save_path,
)
from mlflow.utils.requirements_utils import _get_pinned_requirement

FLAVOR_NAME = "tensorflow"

_logger = logging.getLogger(__name__)

# For tracking if the run was started by autologging.
_AUTOLOG_RUN_ID = None
# File name to which custom objects cloudpickle is saved - used during save and load
_CUSTOM_OBJECTS_SAVE_PATH = "custom_objects.cloudpickle"
# File name to which custom objects stored in tensorflow _GLOBAL_CUSTOM_OBJECTS
# is saved - it is automatically detected and used during save and load
_GLOBAL_CUSTOM_OBJECTS_SAVE_PATH = "global_custom_objects.cloudpickle"
_KERAS_MODULE_SPEC_PATH = "keras_module.txt"
_KERAS_SAVE_FORMAT_PATH = "save_format.txt"
# File name to which the keras model is saved
_MODEL_SAVE_PATH = "model"
_MODEL_TYPE_KERAS = "keras"
_MODEL_TYPE_TF1_ESTIMATOR = "tf1-estimator"
_MODEL_TYPE_TF2_MODULE = "tf2-module"
_KERAS_MODEL_DATA_PATH = "data"
_TF2MODEL_SUBPATH = "tf2model"

MLflowCallback = MlflowCallback  # for backwards compatibility


def get_default_pip_requirements(include_cloudpickle=False):
    """
    Returns:
        A list of default pip requirements for MLflow Models produced by this flavor.
        Calls to :func:`save_model()` and :func:`log_model()` produce a pip environment
        that, at minimum, contains these requirements.
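
    Example return value (illustrative; the pinned version tracks the installed
    TensorFlow package)::

        ["tensorflow==2.13.0"]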
"""
pip_deps = [_get_pinned_requirement("tensorflow")]
if include_cloudpickle:
pip_deps.append(_get_pinned_requirement("cloudpickle"))
return pip_deps


def get_default_conda_env():
    """
    Returns:
        The default Conda environment for MLflow Models produced by calls to
        :func:`save_model()` and :func:`log_model()`.
    """
    return _mlflow_conda_env(additional_pip_deps=get_default_pip_requirements())


def get_global_custom_objects():
    """
    Returns:
        A live reference to the global dictionary of custom objects, or ``None`` if it
        cannot be retrieved.
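
    A sketch of how this interacts with saving (the custom function name is
    hypothetical)::

        import tensorflow as tf

        @tf.keras.utils.register_keras_serializable(package="demo")
        def my_activation(x):
            return tf.nn.relu(x)

        # The registered object now appears in get_global_custom_objects(), so
        # save_model()/log_model() can serialize it automatically.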
"""
try:
from tensorflow.keras.saving import get_custom_objects
return get_custom_objects()
except Exception:
pass


@format_docstring(LOG_MODEL_PARAM_DOCS.format(package_name=FLAVOR_NAME))
def log_model(
    model,
    artifact_path,
    custom_objects=None,
    conda_env=None,
    code_paths=None,
    signature: ModelSignature = None,
    input_example: ModelInputExample = None,
    registered_model_name=None,
    await_registration_for=DEFAULT_AWAIT_MAX_SLEEP_SECONDS,
    pip_requirements=None,
    extra_pip_requirements=None,
    saved_model_kwargs=None,
    keras_model_kwargs=None,
    metadata=None,
):
"""
Log a TF2 core model (inheriting tf.Module) or a Keras model in MLflow Model format.
.. note::
If you log a Keras or TensorFlow model without a signature, inference with
:py:func:`mlflow.pyfunc.spark_udf()` will not work unless the model's pyfunc
representation accepts pandas DataFrames as inference inputs.
You can infer a model's signature by calling the :py:func:`mlflow.models.infer_signature()`
API on features from the model's test dataset. You can also manually create a model
signature, for example:
.. code-block:: python
:caption: Example of creating signature for saving TensorFlow and `tf.Keras` models
from mlflow.types.schema import Schema, TensorSpec
from mlflow.models import ModelSignature
import numpy as np
input_schema = Schema(
[
TensorSpec(np.dtype(np.uint64), (-1, 5), "field1"),
TensorSpec(np.dtype(np.float32), (-1, 3, 2), "field2"),
]
)
# Create the signature for a model that requires 2 inputs:
# - Input with name "field1", shape (-1, 5), type "np.uint64"
# - Input with name "field2", shape (-1, 3, 2), type "np.float32"
signature = ModelSignature(inputs=input_schema)
Args:
model: The TF2 core model (inheriting tf.Module) or Keras model to be saved.
artifact_path: The run-relative path to which to log model artifacts.
custom_objects: A Keras ``custom_objects`` dictionary mapping names (strings) to
custom classes or functions associated with the Keras model. MLflow saves
these custom layers using CloudPickle and restores them automatically
when the model is loaded with :py:func:`mlflow.tensorflow.load_model` and
:py:func:`mlflow.pyfunc.load_model`.
conda_env: {{ conda_env }}
code_paths: {{ code_paths }}
registered_model_name: If given, create a model version under
``registered_model_name``, also creating a registered model if one
with the given name does not exist.
signature: {{ signature }}
input_example: {{ input_example }}
await_registration_for: Number of seconds to wait for the model version to finish
being created and is in ``READY`` status. By default, the function
waits for five minutes. Specify 0 or None to skip waiting.
pip_requirements: {{ pip_requirements }}
extra_pip_requirements: {{ extra_pip_requirements }}
saved_model_kwargs: a dict of kwargs to pass to ``tensorflow.saved_model.save`` method.
keras_model_kwargs: a dict of kwargs to pass to ``keras_model.save`` method.
metadata: {{ metadata }}
Returns
A :py:class:`ModelInfo <mlflow.models.model.ModelInfo>` instance that contains the
metadata of the logged model.
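
    .. code-block:: python
        :caption: Logging with an input example (illustrative sketch; a trivial Keras model is assumed)

        import mlflow
        import numpy as np
        import tensorflow as tf

        model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
        model.compile(optimizer="adam", loss="mse")

        with mlflow.start_run():
            # Passing `input_example` lets MLflow infer a tensor-based signature.
            mlflow.tensorflow.log_model(
                model, "model", input_example=np.zeros((2, 4), dtype=np.float32)
            )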
"""
return Model.log(
artifact_path=artifact_path,
flavor=mlflow.tensorflow,
model=model,
conda_env=conda_env,
code_paths=code_paths,
custom_objects=custom_objects,
registered_model_name=registered_model_name,
signature=signature,
input_example=input_example,
await_registration_for=await_registration_for,
pip_requirements=pip_requirements,
extra_pip_requirements=extra_pip_requirements,
saved_model_kwargs=saved_model_kwargs,
keras_model_kwargs=keras_model_kwargs,
metadata=metadata,
)


def _save_keras_custom_objects(path, custom_objects, file_name):
    """
    Save custom objects dictionary to a cloudpickle file so a model can be easily loaded later.

    Args:
        path: An absolute path that points to the data directory within /path/to/model.
        custom_objects: Keras ``custom_objects`` is a dictionary mapping
            names (strings) to custom classes or functions to be considered
            during deserialization. MLflow saves these custom layers using
            CloudPickle and restores them automatically when the model is
            loaded with :py:func:`mlflow.keras.load_model` and
            :py:func:`mlflow.pyfunc.load_model`.
        file_name: The file name to save the custom objects to.
    """
    import cloudpickle

    custom_objects_path = os.path.join(path, file_name)
    with open(custom_objects_path, "wb") as out_f:
        cloudpickle.dump(custom_objects, out_f)


_NO_MODEL_SIGNATURE_WARNING = (
    "You are saving a TensorFlow Core model or Keras model "
    "without a signature. Inference with mlflow.pyfunc.spark_udf() will not work "
    "unless the model's pyfunc representation accepts pandas DataFrames as "
    "inference inputs."
)


@format_docstring(LOG_MODEL_PARAM_DOCS.format(package_name=FLAVOR_NAME))
def save_model(
    model,
    path,
    conda_env=None,
    code_paths=None,
    mlflow_model=None,
    custom_objects=None,
    signature: ModelSignature = None,
    input_example: ModelInputExample = None,
    pip_requirements=None,
    extra_pip_requirements=None,
    saved_model_kwargs=None,
    keras_model_kwargs=None,
    metadata=None,
):
"""
Save a TF2 core model (inheriting tf.Module) or Keras model in MLflow Model format to a path on
the local file system.
.. note::
If you save a Keras or TensorFlow model without a signature, inference with
:py:func:`mlflow.pyfunc.spark_udf()` will not work unless the model's pyfunc
representation accepts pandas DataFrames as inference inputs.
You can infer a model's signature by calling the :py:func:`mlflow.models.infer_signature()`
API on features from the model's test dataset. You can also manually create a model
signature, for example:
.. code-block:: python
:caption: Example of creating signature for saving TensorFlow and `tf.Keras` models
from mlflow.types.schema import Schema, TensorSpec
from mlflow.models import ModelSignature
import numpy as np
input_schema = Schema(
[
TensorSpec(np.dtype(np.uint64), (-1, 5), "field1"),
TensorSpec(np.dtype(np.float32), (-1, 3, 2), "field2"),
]
)
# Create the signature for a model that requires 2 inputs:
# - Input with name "field1", shape (-1, 5), type "np.uint64"
# - Input with name "field2", shape (-1, 3, 2), type "np.float32"
signature = ModelSignature(inputs=input_schema)
Args:
model: The Keras model or Tensorflow module to be saved.
path: Local path where the MLflow model is to be saved.
conda_env: {{ conda_env }}
code_paths: {{ code_paths }}
mlflow_model: MLflow model configuration to which to add the ``tensorflow`` flavor.
custom_objects: A Keras ``custom_objects`` dictionary mapping names (strings) to
custom classes or functions associated with the Keras model. MLflow saves
these custom layers using CloudPickle and restores them automatically
when the model is loaded with :py:func:`mlflow.tensorflow.load_model` and
:py:func:`mlflow.pyfunc.load_model`.
signature: {{ signature }}
input_example: {{ input_example }}
pip_requirements: {{ pip_requirements }}
extra_pip_requirements: {{ extra_pip_requirements }}
saved_model_kwargs: a dict of kwargs to pass to ``tensorflow.saved_model.save`` method
if the model to be saved is a Tensorflow module.
keras_model_kwargs: a dict of kwargs to pass to ``model.save`` method if the model
to be saved is a keras model.
metadata: {{ metadata }}
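
    .. code-block:: python
        :caption: Saving with ``custom_objects`` (illustrative sketch; the custom function is hypothetical)

        import mlflow
        import tensorflow as tf


        def my_activation(x):
            return tf.nn.relu(x)


        model = tf.keras.Sequential(
            [tf.keras.layers.Dense(1, activation=my_activation, input_shape=(4,))]
        )
        model.compile(optimizer="adam", loss="mse")
        # The custom function is cloudpickled alongside the model and restored on load.
        mlflow.tensorflow.save_model(
            model, "/tmp/my_model", custom_objects={"my_activation": my_activation}
        )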
"""
import tensorflow as tf
from tensorflow.keras.models import Model as KerasModel
# check if path exists
path = os.path.abspath(path)
_validate_and_prepare_target_save_path(path)
code_dir_subpath = _validate_and_copy_code_paths(code_paths, path)
if mlflow_model is None:
mlflow_model = Model()
saved_example = _save_example(mlflow_model, input_example, path)
if signature is None and saved_example is not None:
wrapped_model = None
if isinstance(model, KerasModel):
wrapped_model = _KerasModelWrapper(model, signature)
elif isinstance(model, tf.Module):
wrapped_model = _TF2ModuleWrapper(model, signature)
if wrapped_model is not None:
signature = _infer_signature_from_input_example(saved_example, wrapped_model)
elif signature is False:
signature = None
if signature is None:
_logger.warning(_NO_MODEL_SIGNATURE_WARNING)
else:
num_inputs = len(signature.inputs.inputs)
if num_inputs == 0:
raise MlflowException(
"The model signature's input schema must contain at least one field.",
error_code=INVALID_PARAMETER_VALUE,
)
for field in signature.inputs.inputs:
if not isinstance(field, TensorSpec):
raise MlflowException(
"All fields in the model signature's input schema must be of type TensorSpec.",
error_code=INVALID_PARAMETER_VALUE,
)
if field.shape[0] != -1:
raise MlflowException(
"All fields in the model signature's input schema must have a shape "
"in which the first dimension is a variable dimension.",
error_code=INVALID_PARAMETER_VALUE,
)
_validate_env_arguments(conda_env, pip_requirements, extra_pip_requirements)
if signature is not None:
mlflow_model.signature = signature
if metadata is not None:
mlflow_model.metadata = metadata

    if isinstance(model, KerasModel):
        keras_model_kwargs = keras_model_kwargs or {}
        data_subpath = _KERAS_MODEL_DATA_PATH
        # construct new data folder in existing path
        data_path = os.path.join(path, data_subpath)
        os.makedirs(data_path)
        model_subpath = os.path.join(data_subpath, _MODEL_SAVE_PATH)
        keras_module = importlib.import_module("tensorflow.keras")

        # save custom objects if there are custom objects
        if custom_objects is not None:
            _save_keras_custom_objects(data_path, custom_objects, _CUSTOM_OBJECTS_SAVE_PATH)

        # save custom objects stored within _GLOBAL_CUSTOM_OBJECTS
        if global_custom_objects := get_global_custom_objects():
            _save_keras_custom_objects(
                data_path, global_custom_objects, _GLOBAL_CUSTOM_OBJECTS_SAVE_PATH
            )

        # save keras module spec to path/data/keras_module.txt
        with open(os.path.join(data_path, _KERAS_MODULE_SPEC_PATH), "w") as f:
            f.write(keras_module.__name__)

        # Use the SavedModel format if `save_format` is unspecified
        save_format = keras_model_kwargs.get("save_format", "tf")
        # save keras save_format to path/data/save_format.txt
        with open(os.path.join(data_path, _KERAS_SAVE_FORMAT_PATH), "w") as f:
            f.write(save_format)

        # Save the Keras model.
        # To maintain prior behavior, when the format is HDF5, we save with the .h5 file
        # extension. Otherwise, model_path is a directory where the saved_model.pb will be
        # stored (for the SavedModel format).
        # TensorFlow >= 2.16.0 (including dev versions) only supports saving models in the
        # .h5 or .keras format.
        if save_format == "h5":
            file_extension = ".h5"
        elif Version(tf.__version__).release >= (2, 16):
            file_extension = ".keras"
        else:
            file_extension = ""
        model_path = os.path.join(path, model_subpath) + file_extension
        if path.startswith("/dbfs/"):
            # The Databricks Filesystem uses a FUSE implementation that does not support
            # random writes, which causes errors when saving directly to it. Save to a
            # local temporary file first, then copy the result over.
            with tempfile.NamedTemporaryFile(suffix=".h5") as f:
                model.save(f.name, **keras_model_kwargs)
                f.flush()  # force flush the data
                shutil.copy2(src=f.name, dst=model_path)
        else:
            model.save(model_path, **keras_model_kwargs)

        pyfunc_options = {
            "data": data_subpath,
        }
        flavor_options = {
            **pyfunc_options,
            "model_type": _MODEL_TYPE_KERAS,
            "keras_version": tf.__version__,
            "save_format": save_format,
        }
    elif isinstance(model, tf.Module):
        saved_model_kwargs = saved_model_kwargs or {}
        model_dir_subpath = _TF2MODEL_SUBPATH
        model_path = os.path.join(path, model_dir_subpath)
        tf.saved_model.save(model, model_path, **saved_model_kwargs)

        pyfunc_options = {}
        flavor_options = {
            "saved_model_dir": model_dir_subpath,
            "model_type": _MODEL_TYPE_TF2_MODULE,
        }
    else:
        raise MlflowException(f"Unknown model type: {type(model)}")

    # update flavor info to mlflow_model
    mlflow_model.add_flavor(FLAVOR_NAME, code=code_dir_subpath, **flavor_options)

    # append loader_module, data and env data to mlflow_model
    pyfunc.add_to_model(
        mlflow_model,
        loader_module="mlflow.tensorflow",
        conda_env=_CONDA_ENV_FILE_NAME,
        python_env=_PYTHON_ENV_FILE_NAME,
        code=code_dir_subpath,
        **pyfunc_options,
    )

    # add model file size to mlflow_model
    if size := get_total_file_size(path):
        mlflow_model.model_size_bytes = size

    # save mlflow_model to path/MLmodel
    mlflow_model.save(os.path.join(path, MLMODEL_FILE_NAME))

    include_cloudpickle = custom_objects is not None or get_global_custom_objects() is not None
    if conda_env is None:
        if pip_requirements is None:
            default_reqs = get_default_pip_requirements(include_cloudpickle)
            # To ensure `_load_pyfunc` can successfully load the model during the dependency
            # inference, `mlflow_model.save` must be called beforehand to save an MLmodel file.
            inferred_reqs = mlflow.models.infer_pip_requirements(
                path, FLAVOR_NAME, fallback=default_reqs
            )
            default_reqs = sorted(set(inferred_reqs).union(default_reqs))
        else:
            default_reqs = None
        conda_env, pip_requirements, pip_constraints = _process_pip_requirements(
            default_reqs,
            pip_requirements,
            extra_pip_requirements,
        )
    else:
        conda_env, pip_requirements, pip_constraints = _process_conda_env(conda_env)

    with open(os.path.join(path, _CONDA_ENV_FILE_NAME), "w") as f:
        yaml.safe_dump(conda_env, stream=f, default_flow_style=False)

    # Save `constraints.txt` if necessary
    if pip_constraints:
        write_to(os.path.join(path, _CONSTRAINTS_FILE_NAME), "\n".join(pip_constraints))

    # Save `requirements.txt`
    write_to(os.path.join(path, _REQUIREMENTS_FILE_NAME), "\n".join(pip_requirements))

    _PythonEnv.current().to_yaml(os.path.join(path, _PYTHON_ENV_FILE_NAME))


def _load_custom_objects(path, file_name):
    custom_objects_path = None
    if os.path.isdir(path):
        if os.path.isfile(os.path.join(path, file_name)):
            custom_objects_path = os.path.join(path, file_name)
    if custom_objects_path is not None:
        import cloudpickle

        with open(custom_objects_path, "rb") as f:
            return cloudpickle.load(f)


def _load_keras_model(model_path, keras_module, save_format, **kwargs):
    keras_models = importlib.import_module(keras_module.__name__ + ".models")
    custom_objects = kwargs.pop("custom_objects", {})
    if saved_custom_objects := _load_custom_objects(model_path, _CUSTOM_OBJECTS_SAVE_PATH):
        saved_custom_objects.update(custom_objects)
        custom_objects = saved_custom_objects
    if global_custom_objects := _load_custom_objects(
        model_path, _GLOBAL_CUSTOM_OBJECTS_SAVE_PATH
    ):
        global_custom_objects.update(custom_objects)
        custom_objects = global_custom_objects

    if os.path.isdir(model_path):
        model_path = os.path.join(model_path, _MODEL_SAVE_PATH)
        # If the save_format is HDF5, then we save with the .h5 file
        # extension to align with prior behavior of mlflow logging.
        if save_format == "h5":
            model_path += ".h5"
        # Since TF 2.16.0, models can only be saved in the .h5 or .keras format, but for
        # backwards compatibility we still load models saved without a suffix by older
        # versions of TF.
        elif os.path.exists(model_path + ".keras"):
            model_path += ".keras"

    import tensorflow as tf

    # Use a naive tuple-based comparison of `Version(...).release` here rather than comparing
    # `packaging.version.Version` objects directly, because the latter considers a dev version
    # such as 2.16.0.dev2023010 to come before 2.16. While that is "correct", we want to treat
    # it as part of 2.16 here.
    if save_format == "h5" and (2, 2, 3) <= Version(tf.__version__).release < (2, 16):
        # NOTE: TF 2.2.3 does not work with unicode paths in python2. Pass in h5py.File instead
        # of string to avoid issues.
        import h5py

        with h5py.File(os.path.abspath(model_path), "r") as model_path:
            return keras_models.load_model(model_path, custom_objects=custom_objects, **kwargs)
    else:
        # NOTE: Older versions of Keras only handle filepath.
        return keras_models.load_model(model_path, custom_objects=custom_objects, **kwargs)


def _get_flavor_conf(model_conf):
    if "keras" in model_conf.flavors:
        return model_conf.flavors["keras"]
    return model_conf.flavors[FLAVOR_NAME]


def _infer_model_type(model_conf):
    model_type = _get_flavor_conf(model_conf).get("model_type")
    if model_type is not None:
        return model_type
    # The model was logged by an old version of MLflow that does not record model_type.
    # Infer the model type by checking whether model_conf contains a "keras" flavor.
    if "keras" in model_conf.flavors:
        return _MODEL_TYPE_KERAS
    return _MODEL_TYPE_TF1_ESTIMATOR


def load_model(model_uri, dst_path=None, saved_model_kwargs=None, keras_model_kwargs=None):
    """
    Load an MLflow model that contains the TensorFlow flavor from the specified path.

    Args:
        model_uri: The location, in URI format, of the MLflow model. For example:

            - ``/Users/me/path/to/local/model``
            - ``relative/path/to/local/model``
            - ``s3://my_bucket/path/to/model``
            - ``runs:/<mlflow_run_id>/run-relative/path/to/model``
            - ``models:/<model_name>/<model_version>``
            - ``models:/<model_name>/<stage>``

            For more information about supported URI schemes, see
            `Referencing Artifacts <https://www.mlflow.org/docs/latest/concepts.html#
            artifact-locations>`_.
        dst_path: The local filesystem path to which to download the model artifact.
            This directory must already exist. If unspecified, a local output
            path will be created.
        saved_model_kwargs: kwargs to pass to the ``tensorflow.saved_model.load`` method.
            Only available when you are loading a TensorFlow 2 core model.
        keras_model_kwargs: kwargs to pass to the ``keras.models.load_model`` method.
            Only available when you are loading a Keras model.

    Returns:
        The loaded model: a Keras model instance if the MLflow model contains the Keras
        flavor, otherwise a callable graph (tf.function) that takes inputs and returns
        inferences.
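
    .. code-block:: python
        :caption: Illustrative loading sketch; the run ID and input data are placeholders

        import mlflow

        model = mlflow.tensorflow.load_model("runs:/<mlflow_run_id>/model")
        # For a Keras model, inference works as usual:
        predictions = model.predict(x_test)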
"""
import tensorflow as tf
local_model_path = _download_artifact_from_uri(artifact_uri=model_uri, output_path=dst_path)
model_configuration_path = os.path.join(local_model_path, MLMODEL_FILE_NAME)
model_conf = Model.load(model_configuration_path)
flavor_conf = _get_flavor_conf(model_conf)
_add_code_from_conf_to_system_path(local_model_path, flavor_conf)
model_type = _infer_model_type(model_conf)
if model_type == _MODEL_TYPE_KERAS:
keras_model_kwargs = keras_model_kwargs or {}
keras_module = importlib.import_module(flavor_conf.get("keras_module", "tensorflow.keras"))
# For backwards compatibility, we assume h5 when the save_format is absent
save_format = flavor_conf.get("save_format", "h5")
model_path = os.path.join(local_model_path, flavor_conf.get("data", _MODEL_SAVE_PATH))
return _load_keras_model(
model_path=model_path,
keras_module=keras_module,
save_format=save_format,
**keras_model_kwargs,
)
if model_type == _MODEL_TYPE_TF1_ESTIMATOR:
tf_saved_model_dir = os.path.join(local_model_path, flavor_conf["saved_model_dir"])
tf_meta_graph_tags = flavor_conf["meta_graph_tags"]
tf_signature_def_key = flavor_conf["signature_def_key"]
return _load_tf1_estimator_saved_model(
tf_saved_model_dir=tf_saved_model_dir,
tf_meta_graph_tags=tf_meta_graph_tags,
tf_signature_def_key=tf_signature_def_key,
)
if model_type == _MODEL_TYPE_TF2_MODULE:
saved_model_kwargs = saved_model_kwargs or {}
tf_saved_model_dir = os.path.join(local_model_path, flavor_conf["saved_model_dir"])
return tf.saved_model.load(tf_saved_model_dir, **saved_model_kwargs)
raise MlflowException(f"Unknown model_type: {model_type}")


def _load_tf1_estimator_saved_model(tf_saved_model_dir, tf_meta_graph_tags, tf_signature_def_key):
    """
    Load a specified TensorFlow model consisting of a TensorFlow metagraph and signature
    definition from a serialized TensorFlow ``SavedModel`` collection.

    Args:
        tf_saved_model_dir: The local filesystem path or run-relative artifact path to the model.
        tf_meta_graph_tags: A list of tags identifying the model's metagraph within the
            serialized ``SavedModel`` object. For more information, see the
            ``tags`` parameter of the `tf.saved_model.builder.SavedModelBuilder
            method <https://www.tensorflow.org/api_docs/python/tf/saved_model/
            builder/SavedModelBuilder#add_meta_graph>`_.
        tf_signature_def_key: A string identifying the input/output signature associated with the
            model. This is a key within the serialized ``SavedModel``'s
            signature definition mapping. For more information, see the
            ``signature_def_map`` parameter of the
            ``tf.saved_model.builder.SavedModelBuilder`` method.

    Returns:
        A callable graph (tensorflow.function) that takes inputs and returns inferences.
    """
    import tensorflow as tf

    loaded = tf.saved_model.load(tags=tf_meta_graph_tags, export_dir=tf_saved_model_dir)
    loaded_sig = loaded.signatures
    if tf_signature_def_key not in loaded_sig:
        raise MlflowException(
            f"Could not find signature def key {tf_signature_def_key}. "
            f"Available keys are: {list(loaded_sig.keys())}"
        )
    return loaded_sig[tf_signature_def_key]


def _load_pyfunc(path):
    """
    Load PyFunc implementation. Called by ``pyfunc.load_model``. This function loads an MLflow
    model with the TensorFlow flavor into a new TensorFlow graph and exposes it behind the
    ``pyfunc.predict`` interface.

    Args:
        path: Local filesystem path to the MLflow Model with the ``tensorflow`` flavor.
    """
    import tensorflow as tf

    model_meta_path1 = os.path.join(path, MLMODEL_FILE_NAME)
    model_meta_path2 = os.path.join(os.path.dirname(path), MLMODEL_FILE_NAME)
    if os.path.isfile(model_meta_path1):
        model_meta = Model.load(model_meta_path1)
    elif os.path.isfile(model_meta_path2):
        model_meta = Model.load(model_meta_path2)
    else:
        raise MlflowException(f"Cannot find file {MLMODEL_FILE_NAME} for the logged model.")

    model_type = _infer_model_type(model_meta)
    if model_type == _MODEL_TYPE_KERAS:
        if os.path.isfile(os.path.join(path, _KERAS_MODULE_SPEC_PATH)):
            with open(os.path.join(path, _KERAS_MODULE_SPEC_PATH)) as f:
                keras_module = importlib.import_module(f.read())
        else:
            from tensorflow import keras

            keras_module = keras

        # By default, we assume the save_format is h5 for backwards compatibility
        save_format = "h5"
        save_format_path = os.path.join(path, _KERAS_SAVE_FORMAT_PATH)
        if os.path.isfile(save_format_path):
            with open(save_format_path) as f:
                save_format = f.read()

        # In SavedModel format, the loaded model should be compiled.
        should_compile = save_format == "tf"
        m = _load_keras_model(
            path, keras_module=keras_module, save_format=save_format, compile=should_compile
        )
        return _KerasModelWrapper(m, model_meta.signature)
    if model_type == _MODEL_TYPE_TF1_ESTIMATOR:
        flavor_conf = _get_flavor_configuration(path, FLAVOR_NAME)
        tf_saved_model_dir = os.path.join(path, flavor_conf["saved_model_dir"])
        tf_meta_graph_tags = flavor_conf["meta_graph_tags"]
        tf_signature_def_key = flavor_conf["signature_def_key"]
        loaded_model = tf.saved_model.load(export_dir=tf_saved_model_dir, tags=tf_meta_graph_tags)
        return _TF2Wrapper(
            model=loaded_model, infer=loaded_model.signatures[tf_signature_def_key]
        )
    if model_type == _MODEL_TYPE_TF2_MODULE:
        flavor_conf = _get_flavor_configuration(path, FLAVOR_NAME)
        tf_saved_model_dir = os.path.join(path, flavor_conf["saved_model_dir"])
        loaded_model = tf.saved_model.load(tf_saved_model_dir)
        return _TF2ModuleWrapper(model=loaded_model, signature=model_meta.signature)

    raise MlflowException("Unknown model_type.")


class _TF2Wrapper:
    """
    Wrapper class that exposes a TensorFlow model for inference via a ``predict`` function
    such that ``predict(data: pandas.DataFrame) -> pandas.DataFrame``. For TensorFlow
    versions >= 2.0.0.
    """

    def __init__(self, model, infer):
        """
        Args:
            model: A TensorFlow SavedModel.
            infer: TensorFlow function returned by a saved model that is used for inference.
        """
        # Note: we need to retain the model reference in the _TF2Wrapper object, because the
        # infer function in tensorflow will be a `ConcreteFunction`, which only retains
        # WeakRefs to the variables it closes over.
        # See https://www.tensorflow.org/guide/function#deleting_tfvariables_between_function_calls
        self.model = model
        self.infer = infer

    def get_raw_model(self):
        """
        Returns the underlying model.
        """
        return self.model

    def predict(
        self,
        data,
        params: Optional[Dict[str, Any]] = None,
    ):
        """
        Args:
            data: Model input data.
            params: Additional parameters to pass to the model for inference.

        Returns:
            Model predictions.
        """
        import tensorflow as tf

        feed_dict = {}
        if isinstance(data, dict):
            feed_dict = {k: tf.constant(v) for k, v in data.items()}
        elif isinstance(data, pandas.DataFrame):
            for df_col_name in list(data):
                # If there are multiple columns with the same name, selecting the shared name
                # from the DataFrame will result in another DataFrame containing the columns
                # with the shared name. TensorFlow cannot make eager tensors out of pandas
                # DataFrames, so we convert the DataFrame to a numpy array here.
                val = data[df_col_name]
                val = val.values if isinstance(val, pandas.DataFrame) else np.array(val.to_list())
                feed_dict[df_col_name] = tf.constant(val)
        else:
            raise TypeError("Only dict and DataFrame input types are supported")

        raw_preds = self.infer(**feed_dict)
        pred_dict = {col_name: raw_preds[col_name].numpy() for col_name in raw_preds.keys()}
        for col in pred_dict.keys():
            # If the output tensor is not 1-dimensional
            # AND all elements have length of 1, flatten the array with `ravel()`
            if len(pred_dict[col].shape) != 1 and all(
                len(element) == 1 for element in pred_dict[col]
            ):
                pred_dict[col] = pred_dict[col].ravel()
            else:
                pred_dict[col] = pred_dict[col].tolist()

        if isinstance(data, dict):
            return pred_dict
        else:
            return pandas.DataFrame.from_dict(data=pred_dict)


class _TF2ModuleWrapper:
    def __init__(self, model, signature):
        self.model = model
        self.signature = signature

    def get_raw_model(self):
        """
        Returns the underlying model.
        """
        return self.model

    def predict(
        self,
        data,
        params: Optional[Dict[str, Any]] = None,
    ):
        """
        Args:
            data: Model input data.
            params: Additional parameters to pass to the model for inference.

        Returns:
            Model predictions.
        """
        import tensorflow as tf

        if isinstance(data, (np.ndarray, list)):
            data = tf.convert_to_tensor(data)
        else:
            raise MlflowException(
                f"Unsupported input data type: {type(data)}; the input data must be "
                "a numpy array or a list."
            )
        result = self.model(data)
        if isinstance(result, tf.Tensor):
            return result.numpy()
        return result


class _KerasModelWrapper:
    def __init__(self, keras_model, signature):
        self.keras_model = keras_model
        self.signature = signature

    def get_raw_model(self):
        """
        Returns the underlying model.
        """
        return self.keras_model

    def predict(
        self,
        data,
        params: Optional[Dict[str, Any]] = None,
    ):
        """
        Args:
            data: Model input data.
            params: Additional parameters to pass to the model for inference.

        Returns:
            Model predictions.
        """
        if isinstance(data, pandas.DataFrame):
            # This branch is for backwards compatibility:
            # If the model signature is not None, then when calling
            # `keras_pyfunc_model.predict(pandas_dataframe)`, `_enforce_schema` converts the
            # DataFrame input into a dict input, so `_KerasModelWrapper.predict` receives a
            # dict-type input.
            # If the model signature is None, `_enforce_schema` does nothing, and a DataFrame
            # input reaches `_KerasModelWrapper.predict` unchanged; we handle this case here
            # to keep backwards compatibility.
            return pandas.DataFrame(self.keras_model.predict(data.values), index=data.index)

        supported_input_types = (np.ndarray, list, tuple, dict)
        if not isinstance(data, supported_input_types):
            raise MlflowException(
                f"Unsupported input data type: {type(data)}. "
                f"Must be one of: {[x.__name__ for x in supported_input_types]}",
                INVALID_PARAMETER_VALUE,
            )
        return self.keras_model.predict(data)


def _assoc_list_to_map(lst):
    """
    Convert an association list to a dictionary.
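
    Example (illustrative)::

        >>> _assoc_list_to_map([("run1", 0.1), ("run1", 0.2), ("run2", 0.3)])
        {'run1': [0.1, 0.2], 'run2': [0.3]}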
"""
d = {}
for run_id, metric in lst:
d[run_id] = d[run_id] + [metric] if run_id in d else [metric]
return d


@picklable_exception_safe_function
def _get_tensorboard_callback(lst):
    import tensorflow as tf

    for x in lst:
        if isinstance(x, tf.keras.callbacks.TensorBoard):
            return x
    return None


# A representation of a TensorBoard event logging directory with two attributes:
# :location - string: The filesystem location of the logging directory
# :is_temp - boolean: `True` if the logging directory was created for temporary use by MLflow,
#            `False` otherwise
class _TensorBoardLogDir(NamedTuple):
    location: str
    is_temp: bool


def _setup_callbacks(callbacks, log_every_epoch, log_every_n_steps):
    """
    Adds TensorBoard and MLflow logging callbacks to the input list, and returns the
    new list and the appropriate log directory.
    """
    from mlflow.tensorflow.autologging import _TensorBoard
    from mlflow.tensorflow.callback import MlflowCallback, MlflowModelCheckpointCallback

    tb = _get_tensorboard_callback(callbacks)
    for callback in callbacks:
        if isinstance(callback, MlflowCallback):
            raise MlflowException(
                "MLflow autologging must be turned off if an `MlflowCallback` is explicitly "
                "added to the callback list. You are creating an `MlflowCallback` while having "
                "autologging enabled. Please either call `mlflow.tensorflow.autolog(disable=True)` "
                "to disable autologging or remove `MlflowCallback` from the callback list."
            )

    if tb is None:
        log_dir = _TensorBoardLogDir(location=tempfile.mkdtemp(), is_temp=True)
        callbacks.append(_TensorBoard(log_dir.location))
    else:
        log_dir = _TensorBoardLogDir(location=tb.log_dir, is_temp=False)

    callbacks.append(
        MlflowCallback(
            log_every_epoch=log_every_epoch,
            log_every_n_steps=log_every_n_steps,
        )
    )

    model_checkpoint = get_autologging_config(mlflow.tensorflow.FLAVOR_NAME, "checkpoint", True)
    if model_checkpoint:
        checkpoint_monitor = get_autologging_config(
            mlflow.tensorflow.FLAVOR_NAME, "checkpoint_monitor", "val_loss"
        )
        checkpoint_mode = get_autologging_config(
            mlflow.tensorflow.FLAVOR_NAME, "checkpoint_mode", "min"
        )
        checkpoint_save_best_only = get_autologging_config(
            mlflow.tensorflow.FLAVOR_NAME, "checkpoint_save_best_only", True
        )
        checkpoint_save_weights_only = get_autologging_config(
            mlflow.tensorflow.FLAVOR_NAME, "checkpoint_save_weights_only", False
        )
        checkpoint_save_freq = get_autologging_config(
            mlflow.tensorflow.FLAVOR_NAME, "checkpoint_save_freq", "epoch"
        )

        if not any(isinstance(callback, MlflowModelCheckpointCallback) for callback in callbacks):
            callbacks.append(
                MlflowModelCheckpointCallback(
                    monitor=checkpoint_monitor,
                    mode=checkpoint_mode,
                    save_best_only=checkpoint_save_best_only,
                    save_weights_only=checkpoint_save_weights_only,
                    save_freq=checkpoint_save_freq,
                )
            )
    return callbacks, log_dir


@autologging_integration(FLAVOR_NAME)
def autolog(
    every_n_iter=1,
    log_models=True,
    log_datasets=True,
    disable=False,