# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exports a SavedModel from a Trackable Python object."""
import collections
import os
import re
import sys
import traceback
from typing import Any, Callable, Dict, List, Tuple
from absl import logging
from tensorflow.core.framework import function_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import node_def_pb2
from tensorflow.core.framework import versions_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import saved_model_pb2
from tensorflow.core.protobuf import saved_object_graph_pb2
from tensorflow.python.checkpoint import checkpoint
from tensorflow.python.checkpoint import checkpoint_options
from tensorflow.python.checkpoint import functional_saver
from tensorflow.python.checkpoint import graph_view
from tensorflow.python.checkpoint import save_util_v1
from tensorflow.python.checkpoint import util as checkpoint_util
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as defun
from tensorflow.python.eager.polymorphic_function import concrete_function as cf
from tensorflow.python.eager.polymorphic_function import polymorphic_function
from tensorflow.python.eager.polymorphic_function import saved_model_exported_concrete
from tensorflow.python.eager.polymorphic_function import saved_model_utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function as framework_fn
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import versions
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.saved_model import builder_impl
from tensorflow.python.saved_model import fingerprinting_utils
from tensorflow.python.saved_model import function_serialization
from tensorflow.python.saved_model import path_helpers
from tensorflow.python.saved_model import pywrap_saved_model
from tensorflow.python.saved_model import registration
from tensorflow.python.saved_model import revived_types
from tensorflow.python.saved_model import save_context
from tensorflow.python.saved_model import save_options
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import signature_serialization
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.saved_model import tracing_utils
from tensorflow.python.saved_model import utils_impl
from tensorflow.python.saved_model.pywrap_saved_model import constants
from tensorflow.python.saved_model.pywrap_saved_model import metrics
from tensorflow.python.trackable import asset
from tensorflow.python.trackable import base
from tensorflow.python.trackable import resource
from tensorflow.python.trackable import trackable_utils
from tensorflow.python.training.saving import trace_saveable_util
from tensorflow.python.types import core as types_core
from tensorflow.python.util import compat
from tensorflow.python.util import object_identity
from tensorflow.python.util import tf_stack
from tensorflow.python.util.tf_export import tf_export
# Placeholder for protosplitter import.
_UNCOPIABLE_DTYPES = frozenset((dtypes.resource, dtypes.variant))
# Container for tensors captured from external functions.
_CapturedTensor = collections.namedtuple("_CapturedTensor",
["name", "concrete_function"])
# Number of untraced functions to display to user in warning message.
_NUM_DISPLAY_UNTRACED_FUNCTIONS = 5
# API label for SavedModel metrics.
_SAVE_V2_LABEL = "save_v2"
class _AugmentedGraphView(graph_view.ObjectGraphView):
"""An extendable graph which also tracks functions attached to objects.
Extensions through `add_object` appear in the object graph and any checkpoints
generated from it, even if they are not dependencies of the node they were
attached to in the saving program. For example a `.signatures` attribute is
added to exported SavedModel root objects without modifying the root object
itself.
Also tracks functions attached to objects in the graph, through the caching
`list_children` method. Enumerating functions only through this method
ensures that we get a consistent view of functions, even if object attributes
create new functions every time they are accessed.
"""
def __init__(self, root):
super(_AugmentedGraphView, self).__init__(root)
# Cache the results of `GraphView.list_children()` to ensure that the
# `Trackable` children are gathered exactly once.
self._children_cache = object_identity.ObjectIdentityDictionary()
# Cache shared between objects in the same object graph. This is passed to
# `Trackable._trackable_children()`.
self._serialization_cache = object_identity.ObjectIdentityDictionary()
# Maps functions -> wrapped functions that capture non-cached variables.
self._wrapped_functions = {}
self.untraced_functions = []
def set_signature(
self,
signature_map: signature_serialization._SignatureMap,
wrapped_functions: Dict[Callable[..., Any], Callable[..., Any]],
):
"""Attach signature to the root object.
Args:
signature_map: An object that contains signature functions.
wrapped_functions: A dictionary mapping functions to functions that are
guaranteed to not capture cached variables (functions that capture
cached variables can't be saved).
"""
self.list_children(self.root)
# Overrides existing dependency.
name = signature_serialization.SIGNATURE_ATTRIBUTE_NAME
self._children_cache[self.root][name] = signature_map
self._wrapped_functions.update(wrapped_functions)
def _breadth_first_traversal(self):
"""Returns all trackable objects in the SavedObjectGraph."""
# This method is overridden to merge all equivalent constant tensors and
# Assets in the object graph.
trackable_objects, _ = (
super(_AugmentedGraphView, self)._breadth_first_traversal())
asset_paths = object_identity.ObjectIdentityDictionary()
constant_captures = object_identity.ObjectIdentityDictionary()
for obj in trackable_objects:
if isinstance(obj, asset.Asset):
asset_paths[obj.asset_path] = obj
if isinstance(obj, saved_model_utils.TrackableConstant):
constant_captures[obj.capture] = obj
def _get_merged_trackable(x):
if isinstance(x, asset.Asset):
return asset_paths[x.asset_path]
if isinstance(x, saved_model_utils.TrackableConstant):
if x.capture in asset_paths:
return asset_paths[x.capture]
else:
return constant_captures[x.capture]
return x
for obj in list(self._children_cache.keys()):
if _get_merged_trackable(obj) is not obj:
del self._children_cache[obj]
continue
for name, child in self._children_cache[obj].items():
self._children_cache[obj][name] = _get_merged_trackable(child)
return super(_AugmentedGraphView, self)._breadth_first_traversal()
def list_children(self, obj):
"""Lists children of `obj` for SavedModel."""
if obj not in self._children_cache:
children = self._children_cache[obj] = {}
for name, child in super(_AugmentedGraphView, self).list_children(
obj,
save_type=base.SaveType.SAVEDMODEL,
cache=self._serialization_cache):
if isinstance(child, defun.ConcreteFunction):
child = self._maybe_uncache_variable_captures(child)
children[name] = child
# Keep track of untraced functions for later reporting to the user.
if isinstance(obj, def_function.Function) and not children:
self.untraced_functions.append(obj.name)
for name, child in self._children_cache[obj].items():
yield base.TrackableReference(name, child)
def get_child(self, obj, name: str):
return self._children_cache[obj][name]
def _maybe_uncache_variable_captures(
self, concrete_function: cf.ConcreteFunction
):
if concrete_function in self._wrapped_functions:
return self._wrapped_functions[concrete_function]
for capture in concrete_function.captured_inputs:
if hasattr(capture, "_cached_variable"):
if concrete_function not in self._wrapped_functions:
wrapped = self._wrapped_functions[concrete_function] = (
function_serialization.wrap_cached_variables(concrete_function)
)
return wrapped
return concrete_function
def list_dependencies(self, obj):
"""Yields `Trackables` that must be loaded before `obj`.
Dependencies and children are both dictionaries of `Trackables`. Children
define the object graph structure (used in both checkpoints and SavedModel),
while dependencies define the order used to load the SavedModel.
Args:
obj: A `Trackable` object
Yields:
Tuple of dependency names and trackable objects.
Raises:
TypeError: if any of the returned dependencies are not instances of
`Trackable`.
"""
if obj not in self._children_cache:
# Slot variables do not appear in the children_cache.
children = {}
else:
children = self._children_cache[obj]
for name, dep in obj._deserialization_dependencies(children).items(): # pylint: disable=protected-access
if not isinstance(dep, base.Trackable):
raise TypeError(
f"The dependency of type {type(dep)} is not an instance `Trackable`"
", and can't be saved to SavedModel. Please check the "
"implementation of `_deserialization_dependencies` in the parent "
f"object {obj}.")
yield name, dep
class _SaveableView(object):
"""Provides a frozen view over a trackable root.
This class helps to create a single stable view over an object to save. The
saving code should access properties and functions via this class and not via
the original object, as there are cases where an object constructs its
trackable attributes and functions dynamically per call and will yield
different objects if invoked more than once.
Changes to the graph, for example adding objects, must happen in
`augmented_graph_view` (an `_AugmentedGraphView`) before the `_SaveableView`
is constructed. Changes after the `_SaveableView` has been constructed will be
ignored.
"""
def __init__(
self,
augmented_graph_view: _AugmentedGraphView,
options: save_options.SaveOptions,
):
"""Initializes a SaveableView.
Args:
augmented_graph_view: A GraphView object.
options: A SaveOptions instance.
"""
self.augmented_graph_view = augmented_graph_view
self._options = options
(self._trackable_objects, self.node_paths, self.node_ids,
self._slot_variables, self.object_names) = (
checkpoint_util.objects_ids_and_slot_variables_and_paths(
self.augmented_graph_view))
untraced_functions = self.augmented_graph_view.untraced_functions
if untraced_functions:
logging.info(
"Found untraced functions such as %s while saving (showing %d of %d)."
" These functions will not be directly callable after loading.",
", ".join(untraced_functions[:_NUM_DISPLAY_UNTRACED_FUNCTIONS]),
min(_NUM_DISPLAY_UNTRACED_FUNCTIONS, len(untraced_functions)),
len(untraced_functions))
self._initialize_save_and_restore_functions()
self._initialize_nodes_and_concrete_functions()
self.captured_tensor_node_ids = object_identity.ObjectIdentityDictionary()
def _initialize_save_and_restore_functions(self):
"""Generates all checkpoint save/restore functions.
The save and restore functions are generated in the eager context (or in the
user's Graph/Session) before being copied to the exported GraphDef. These
functions record the ops for saving/restoring the entire object or
individual objects (e.g. variables and hash tables).
The global save and restore functions are generated for compatibility with
TF1 and loading from C++, and are saved in the `MetaGraphDef.saver_def`.
The individual functions are generated for the Python TF2 use case, where
users use the loaded SavedModel as-is, or compose new models using parts
of the object loaded from the SavedModel. These functions are recorded in
the `saveable_objects` map in the `SavedObject` proto.
"""
checkpoint_factory_map, registered_savers = (
save_util_v1.get_checkpoint_factories_and_keys(self.object_names))
self._obj_to_registered_saver = object_identity.ObjectIdentityDictionary()
for saver_name, trackables in registered_savers.items():
for trackable in trackables.values():
self._obj_to_registered_saver[trackable] = saver_name
self._saveable_objects_map = (
_gen_save_and_restore_functions(checkpoint_factory_map))
def _initialize_nodes_and_concrete_functions(self):
"""Creates graph with nodes for trackable objects and functions.
Adds functions for each trackable object to `self.nodes` and associated
concrete functions to `self.concrete_functions` for serialization.
"""
self.nodes = list(self._trackable_objects)
self.gradient_functions = []
self.gradient_defs = []
for obj in self.nodes:
if obj in self._saveable_objects_map:
for save_fn, restore_fn in self._saveable_objects_map[obj].values():
self.node_ids[save_fn] = len(self.nodes)
self.nodes.append(save_fn)
self.node_ids[restore_fn] = len(self.nodes)
self.nodes.append(restore_fn)
self.concrete_functions = [
obj for obj in self.nodes if isinstance(obj, defun.ConcreteFunction)
]
@property
def concrete_and_gradient_functions(self):
return self.concrete_functions + self.gradient_functions
@property
def root(self):
return self.nodes[0]
def fill_object_graph_proto(
self, proto: saved_object_graph_pb2.SavedObjectGraph
):
"""Populate the nodes, children and slot_variables of a SavedObjectGraph."""
for node_id, node in enumerate(self.nodes):
assert self.node_ids[node] == node_id
object_proto = proto.nodes.add()
object_proto.slot_variables.extend(self._slot_variables.get(node, ()))
if isinstance(node, _CapturedTensor):
continue
for child in self.augmented_graph_view.list_children(node):
child_proto = object_proto.children.add()
child_proto.node_id = self.node_ids[child.ref]
child_proto.local_name = child.name
for name, ref in self.augmented_graph_view.list_dependencies(node):
child_proto = object_proto.dependencies.add()
child_proto.node_id = self.node_ids[ref]
child_proto.local_name = name
if node in self._saveable_objects_map:
assert node not in self._obj_to_registered_saver, (
"Objects can't have both SaveableObjects and a registered saver")
for local_name, (save_fn, restore_fn) in (
self._saveable_objects_map[node].items()):
saveable_object_proto = object_proto.saveable_objects[local_name]
saveable_object_proto.save_function = self.node_ids[save_fn]
saveable_object_proto.restore_function = self.node_ids[restore_fn]
elif node in self._obj_to_registered_saver:
object_proto.registered_saver = self._obj_to_registered_saver[node]
def map_resources(self):
"""Makes new resource handle ops corresponding to existing resource tensors.
Creates resource handle ops in the current default graph, whereas
`accessible_objects` will be from an eager context. Resource mapping adds
resource handle ops to the main GraphDef of a SavedModel, which allows the
C++ loader API to interact with resources.
Returns:
A tuple of (object_map, tensor_map, asset_info):
object_map: A dictionary mapping from object in `accessible_objects` to
replacement objects created to hold the new resource tensors.
tensor_map: A dictionary mapping from resource tensors extracted from
`accessible_objects` to newly created resource tensors.
asset_info: An _AssetInfo tuple describing external assets referenced
from accessible_objects.
"""
# Only makes sense when adding to the export Graph
assert not context.executing_eagerly()
# TODO(b/205007558): Handle MirroredVariables and other types of variables
# which may need special casing.
object_map = object_identity.ObjectIdentityDictionary()
tensor_map = object_identity.ObjectIdentityDictionary()
asset_info = _AssetInfo(
asset_defs=[],
asset_initializers_by_resource=object_identity.ObjectIdentityDictionary(),
asset_filename_map={},
asset_index={})
for node_id in _dependency_sorted_node_ids(self):
obj = self.nodes[node_id]
tensors = obj._export_to_saved_model_graph( # pylint: disable=protected-access
object_map=object_map,
tensor_map=tensor_map,
options=self._options)
if isinstance(obj, asset.Asset):
_add_asset_info(obj, asset_info, tensor_map[obj.asset_path])
if tensors:
for tensor in tensors:
self.captured_tensor_node_ids[tensor] = node_id
return object_map, tensor_map, asset_info
def add_capture_and_node(self, capture, node):
node_id = len(self.nodes)
self.nodes.append(node)
self.node_ids[capture] = node_id
self.node_ids[node] = node_id
self.captured_tensor_node_ids[capture] = node_id
return node_id
def get_concrete_resource_initializers(self):
concrete_initializers = []
for obj in self.nodes:
if isinstance(obj, resource.CapturableResource):
concrete_initializers.append(
self.augmented_graph_view.get_child(
obj, "_initialize").get_concrete_function())
return concrete_initializers
def _gen_save_and_restore_functions(
checkpoint_factory_map: object_identity.ObjectIdentityDictionary,
) -> object_identity.ObjectIdentityDictionary:
"""Generates global and individual save/restore concrete functions.
The global functions record the ops to save and restore the entire object to
a file prefix, while the individual functions save and restore value tensors
for resources.
This function is intended to run on the output of
`save_util_v1.get_checkpoint_factories_and_keys(object_names)`,
which returns a map of `_CheckpointFactoryData`.
Args:
checkpoint_factory_map: A dictionary mapping trackable objects to
a list of `_CheckpointFactoryData`.
Returns:
saveable_fn_map: Maps obj -> factory name -> (concrete save, restore).
"""
# Maps obj -> factory attribute_name -> (concrete save, concrete restore)
saveable_fn_map = object_identity.ObjectIdentityDictionary()
for obj, factory_data_list in checkpoint_factory_map.items():
if resource_variable_ops.is_resource_variable(obj) or not factory_data_list:
# There is no need to trace the save and restore functions for variables.
continue
if factory_data_list[0].name == trackable_utils.SERIALIZE_TO_TENSORS_NAME:
# Trace Trackable save and restore functions.
assert len(factory_data_list) == 1
saveable_fn_map[obj] = {trackable_utils.SERIALIZE_TO_TENSORS_NAME: (
tracing_utils.trace_save_and_restore(obj))}
else:
# Trace deprecated SaveableObject save and restore functions.
saveable_fn_map[obj] = (
trace_saveable_util.trace_save_restore_function_map(
obj, factory_data_list))
return saveable_fn_map
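# Illustrative sketch (hypothetical class, not part of this module): an object
# that reaches the `SERIALIZE_TO_TENSORS_NAME` branch above is a `Trackable`
# implementing the tensor-serialization protocol, roughly:
#
#   class Counter(base.Trackable):
#     def __init__(self, variable):
#       self.count = variable  # a tf.Variable supplied by the caller
#     def _serialize_to_tensors(self):
#       return {"count": self.count.read_value()}
#     def _restore_from_tensors(self, restored_tensors):
#       return self.count.assign(restored_tensors["count"])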
def _tensor_dict_to_tensorinfo(tensor_dict):
return {
key: utils_impl.build_tensor_info_internal(value)
for key, value in tensor_dict.items()
}
def _to_safe_name_scope(signature_key: str, user_input_name: str):
"""Creates a sanitized name scope from user signature and input names.
Concatenates signature and input names, sanitizing as needed to be a valid
scope name.
Args:
signature_key: The user-provided key for the signature.
user_input_name: The user-provided name for the input placeholder.
Returns:
A name scope that is safe to be used in tf.name_scope().
"""
name_scope = "{}_{}".format(signature_key, user_input_name)
if re.match(r"^[A-Za-z0-9.][A-Za-z0-9_.\\-]*$", name_scope):
return name_scope
invalid_prefix_stripped = re.sub(r"^[^A-Za-z0-9.]*", "", name_scope)
return re.sub(r"[^A-Za-z0-9_.\\-]", "_", invalid_prefix_stripped)
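# Example behavior (illustrative): names that already match the pattern pass
# through unchanged, while disallowed characters become underscores:
#
#   _to_safe_name_scope("serving_default", "x")   -> "serving_default_x"
#   _to_safe_name_scope("serving_default", "a/b") -> "serving_default_a_b"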
def _map_function_arguments_to_created_inputs(
function_arguments: List[Any],
signature_key: str,
function_name: bytes,
defaults=None,
):
"""Creates exterior placeholders in the exported graph for function arguments.
Functions have two types of inputs: tensors captured from the outside (eager)
context, and arguments to the function which we expect to receive from the
user at each call. `_map_captures_to_created_tensors` replaces
captured tensors with stand-ins (typically these are resource dtype tensors
associated with variables). `_map_function_arguments_to_created_inputs` runs
over every argument, creating a new placeholder for each which will belong to
the exported graph rather than the function body.
Args:
function_arguments: A list of argument placeholders in the function body.
signature_key: The name of the signature being exported, for error messages.
function_name: The name of the function, for error messages.
defaults: A dictionary mapping signature_key to dictionary of
user_specified_name to Tensor representing default values.
Returns:
A tuple of (mapped_inputs, exterior_placeholders)
mapped_inputs: A list with entries corresponding to `function_arguments`
containing all of the inputs of the function gathered from the exported
graph (both captured resources and arguments).
exterior_argument_placeholders: A dictionary mapping from argument names
to placeholders in the exported graph, containing the explicit arguments
to the function which a user is expected to provide.
Raises:
ValueError: If argument names are not unique.
"""
# `exterior_argument_placeholders` holds placeholders which are outside the
# function body, directly contained in a MetaGraph of the SavedModel. The
# function body itself contains nearly identical placeholders used when
# running the function, but these exterior placeholders allow Session-based
# APIs to call the function using feeds and fetches which name Tensors in the
# MetaGraph.
exterior_argument_placeholders = {}
mapped_inputs = []
for placeholder in function_arguments:
# `export_captures` contains an exhaustive set of captures, so if we don't
# find the input there then we now know we have an argument.
user_input_name = compat.as_str_any(
placeholder.op.get_attr("_user_specified_name"))
# If the internal placeholders for a function have names which were
# uniquified by TensorFlow, then a single user-specified argument name
# must refer to multiple Tensors. The resulting signatures would be
# confusing to call. Instead, we throw an exception telling the user to
# specify explicit names.
if user_input_name != placeholder.op.name:
# This should be unreachable, since concrete functions may not be
# generated with non-unique argument names.
raise ValueError(
"Got non-flat/non-unique argument names for SavedModel signature "
f"'{signature_key}': more than one argument to "
f"'{compat.as_str_any(function_name)}' was named "
f"'{user_input_name}'. "
"Signatures have one Tensor per named input, so to have "
"predictable names Python functions used to generate these "
"signatures should avoid *args and Tensors in nested "
"structures unless unique names are specified for each. Use "
"tf.TensorSpec(..., name=...) to provide a name for a Tensor "
"input.")
default_value = defaults.get(signature_key, {}).get(user_input_name)
if default_value is not None:
placeholder_with_default = array_ops.placeholder_with_default(
input=default_value.numpy(),
shape=placeholder.shape,
name=_to_safe_name_scope(signature_key, user_input_name),
)
exterior_argument_placeholders[user_input_name] = placeholder_with_default
mapped_inputs.append(placeholder_with_default)
else:
arg_placeholder = array_ops.placeholder(
shape=placeholder.shape,
dtype=placeholder.dtype,
name=_to_safe_name_scope(signature_key, user_input_name),
)
exterior_argument_placeholders[user_input_name] = arg_placeholder
mapped_inputs.append(arg_placeholder)
return mapped_inputs, exterior_argument_placeholders
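# Illustrative `defaults` structure (hypothetical values): a nested mapping of
# signature key -> argument name -> eager default Tensor. An entry such as
#
#   defaults = {"serving_default": {"temperature": tf.constant(1.0)}}
#
# causes `temperature` to be exported as a `placeholder_with_default` rather
# than a plain placeholder.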
def _generate_signatures(
signature_functions: Dict[str, Callable[..., Any]],
object_map: object_identity.ObjectIdentityDictionary,
defaults=None,
):
"""Validates and calls `signature_functions` in the exported graph.
Args:
signature_functions: A dictionary mapping string keys to concrete TensorFlow
functions (e.g. from `signature_serialization.canonicalize_signatures`)
which will be used to generate SignatureDefs.
object_map: A dictionary that contains mappings from signature functions to
concrete functions in the exported graph.
defaults: A dictionary mapping signature_key to dictionary of
user_specified_name to Tensor representing default values.
Returns:
Each function in the `signature_functions` dictionary is called with
placeholder Tensors, generating a function call operation and output
Tensors. The placeholder Tensors, the function call operation, and the
output Tensors from the function call are part of the default Graph.
This function then returns a dictionary with the same structure as
`signature_functions`, with the concrete functions replaced by SignatureDefs
implicitly containing information about how to call each function from a
TensorFlow 1.x Session / the C++ Loader API. These SignatureDefs reference
the generated placeholders and Tensor outputs by name.
The caller is expected to include the Graph that is set as the default while
calling this function as a MetaGraph in a SavedModel, including the returned
SignatureDefs as part of that MetaGraph.
"""
signatures = {}
for signature_key, function in sorted(signature_functions.items()):
if function.graph.captures:
argument_inputs = function.graph.inputs[:-len(function.graph.captures)]
else:
argument_inputs = function.graph.inputs
mapped_inputs, exterior_argument_placeholders = (
_map_function_arguments_to_created_inputs(
argument_inputs, signature_key, function.name, defaults
)
)
kwarg_names = list(
sorted(
object_map[function].function.structured_input_signature[1].keys()))
outputs = object_map[function](**{
kwarg_name: mapped_input
for kwarg_name, mapped_input in zip(kwarg_names, mapped_inputs)
})
signatures[signature_key] = signature_def_utils.build_signature_def(
_tensor_dict_to_tensorinfo(exterior_argument_placeholders),
_tensor_dict_to_tensorinfo(outputs),
method_name=signature_constants.PREDICT_METHOD_NAME,
defaults=defaults.get(signature_key, None),
)
return signatures
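# Illustrative sketch (user-side code, not part of this module): the concrete
# functions processed above typically originate from a `tf.function` traced
# with an explicit input signature, e.g.:
#
#   @tf.function(input_signature=[tf.TensorSpec([None], tf.float32, name="x")])
#   def serve(x):
#     return {"doubled": 2.0 * x}
#
#   tf.saved_model.save(obj, export_dir, signatures={"serving_default": serve})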
_AssetInfo = collections.namedtuple(
"_AssetInfo",
[
# List of AssetFileDef protocol buffers
"asset_defs",
# Map from asset variable resource Tensors to their init ops
"asset_initializers_by_resource",
# Map from base asset filenames to full paths
"asset_filename_map",
# Map from Asset to index of corresponding AssetFileDef
"asset_index",
],
)
def _add_asset_info(
trackable_asset,
asset_info: _AssetInfo,
mapped_path_variable: resource_variable_ops.ResourceVariable,
):
"""Add `trackable_asset` to `asset_info`."""
original_path_tensor = trackable_asset.asset_path
original_path = tensor_util.constant_value(original_path_tensor)
try:
original_path = str(original_path.astype(str))
except AttributeError:
# Already a string rather than a numpy array
pass
path = builder_impl.get_asset_filename_to_add(
asset_filepath=original_path,
asset_filename_map=asset_info.asset_filename_map)
asset_info.asset_filename_map[path] = original_path
asset_def = meta_graph_pb2.AssetFileDef()
asset_def.filename = path
asset_def.tensor_info.name = mapped_path_variable.initial_value.name
asset_info.asset_defs.append(asset_def)
asset_info.asset_initializers_by_resource[original_path_tensor] = (
mapped_path_variable.initializer)
asset_info.asset_index[trackable_asset] = len(asset_info.asset_defs) - 1
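# Illustrative sketch (user-side code, hypothetical path): the assets recorded
# here come from `tf.saved_model.Asset` objects, whose files are copied into
# the SavedModel's assets/ directory at export time:
#
#   obj.vocab = tf.saved_model.Asset("/tmp/vocab.txt")
#   tf.saved_model.save(obj, export_dir)  # copies vocab.txt into assets/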
def _iterate_op_types(fn: Callable[..., Any]):
"""Iterates through each op in the function and returns the op type and op."""
if isinstance(fn, framework_fn._DefinedFunction): # pylint: disable=protected-access
for node in fn.definition.node_def:
op_type = node.attr["_gradient_op_type"].s
if op_type:
raise ValueError(
"Unable to save gradient functions when exporting a "
"_DefinedFunction (generally created through graph freezing utils "
"or through V1 graph importers). Please save with "
"`options=tf.SaveOptions(experimental_custom_gradients=False)`")
else:
for op in fn.graph.get_operations():
try:
op_type = op.get_attr("_gradient_op_type")
except ValueError:
continue
yield op_type, op
def _get_outer_most_capture(
fn: Callable[..., Any],
capture: ops.Tensor,
func_graph_map: Dict[ops.Graph, Callable[..., Any]],
):
"""Tries to find the original captured tensor if capture more than once."""
outer_fn = fn
while outer_fn is not None and not isinstance(capture, ops.EagerTensor):
if capture.graph is not outer_fn.graph:
outer_fn = func_graph_map.get(outer_fn.graph.outer_graph)
else:
try:
capture_index = outer_fn.graph.internal_captures.index(capture)
except ValueError:
break # Capture is a tensor inside function, and not captured from
# another external function
capture = outer_fn.graph.external_captures[capture_index]
outer_fn = func_graph_map.get(outer_fn.graph.outer_graph)
return outer_fn, capture
def _trace_gradient_functions(graph: ops.Graph, saveable_view: _SaveableView):
"""Traces gradient functions and records them in the SaveableView."""
functions = list(graph._functions.values()) # pylint: disable=protected-access
func_graph_map = {f.graph: f for f in functions if hasattr(f, "graph")}
seen_op_types = set()
for fn in functions:
for op_type, op in _iterate_op_types(fn):
if op_type in seen_op_types:
continue
seen_op_types.add(op_type)
try:
custom_gradient = ops.gradient_registry.lookup(op_type)
except LookupError:
continue
try:
grad_fn = (
def_function.function(custom_gradient).get_concrete_function(
None, *op.inputs))
except Exception as exc:
traceback.print_exc()
raise ValueError(
"Error when tracing gradients for SavedModel.\n\n"
"Check the error log to see the error that was raised when "
"converting a gradient function to a concrete function. You may "
"need to update the custom gradient, or disable saving gradients "
"with the option "
"tf.saved_model.SaveOptions(experimental_custom_gradients=False)"
f".\n\tProblematic op name: {op.name}\n\tGradient inputs: "
f"{op.inputs}") from exc
with graph.as_default():
# The gradient function will capture all intermediate values. These
# captures must be serialized so that they can be re-bound to the function
# when loading.
bad_captures = []
for capture in grad_fn.captured_inputs:
if capture.dtype in _UNCOPIABLE_DTYPES:
continue
# Tries to find the outermost capture in case the tensor is a constant
# or not actually captured in the current function (this could happen
# if the function is a while loop body, in which case the captured
# input is not the internal captured tensor).
outer_fn, outer_capture = _get_outer_most_capture(
fn, capture, func_graph_map
)
if outer_fn is None or isinstance(outer_capture, ops.EagerTensor):
if outer_capture not in saveable_view.captured_tensor_node_ids:
raise ValueError(
f"Found invalid capture {outer_capture} when "
"saving custom gradients."
)
saveable_view.captured_tensor_node_ids[capture] = (
saveable_view.captured_tensor_node_ids[outer_capture]
)
elif outer_capture.graph is outer_fn.graph:
capture_name = outer_capture.name
# It's possible for AtomicFunctions to save different names
# for input tensors when serialized to FunctionDef (all
# non-alphanumeric characters are converted to '_').
if isinstance(outer_fn, defun.AtomicFunction): # pylint:disable=protected-access
try:
arg_index = outer_fn.graph.inputs.index(outer_capture)
capture_name = (
outer_fn.cached_definition.signature.input_arg[
arg_index
].name
+ ":0"
)
except ValueError:
pass
node = _CapturedTensor(capture_name, outer_fn.name)
saveable_view.add_capture_and_node(capture, node)
else:
bad_captures.append(capture.name)
if not bad_captures:
grad_fn.add_to_graph(graph)
else:
raise ValueError(
f"Cannot save custom gradient {op_type} called in function {fn} "
"because SavedModel is unable to serialize the captured "
f"inputs: {bad_captures}"
)
saveable_view.gradient_functions.append(grad_fn)
func_graph_map[grad_fn.graph] = grad_fn
grad_def = function_pb2.RegisteredGradient()
grad_def.gradient_func = grad_fn.name
grad_def.registered_op_type = op_type
saveable_view.gradient_defs.append(grad_def)
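# Illustrative sketch (user-side code): the gradients traced above come from
# `tf.custom_gradient`-decorated functions. If tracing fails, saving them can
# be skipped with the option named in the error messages:
#
#   @tf.custom_gradient
#   def clipped_identity(x):
#     def grad(dy):
#       return tf.clip_by_norm(dy, 0.5)
#     return x, grad
#
#   tf.saved_model.save(
#       obj, export_dir,
#       options=tf.saved_model.SaveOptions(
#           experimental_custom_gradients=False))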
def _strip_debug_nodes(meta_graph_def: meta_graph_pb2.MetaGraphDef) -> None:
"""An experimental function to remove debug nodes from the final graph.
This function removes all assert nodes from the meta_graph. It strips the
assert operators in both the nodes and in all of the function defs, replacing
them with `NoOp`s. In addition, it converts each stripped node's inputs that
are not already control inputs into control inputs. For more information about
control inputs please see go/how-tensors-flow#control-dependencies.
Args:
meta_graph_def: The meta_graph that will be exported.
"""
def is_control_input(name: str) -> bool:
"""Returns whether or not the input is a control input."""
return bool(name) and name[0] == "^"
def as_control_dep(name: str) -> str:
"""Returns the input as a control dependency."""
return "^" + name.split(":")[0]
def maybe_do_strip(node: node_def_pb2.NodeDef) -> None:
"""Strips the graph by making it a NoOp if it is an Assert node.
This function also rewrites all of the inputs to the nodes that were
transformed by making them into control dependencies.
Args:
node: The node to potentally strip.
"""
if node.op == "Assert":
node.op = "NoOp"
new_inputs = []
for inp in node.input:
if not is_control_input(inp):
new_inputs.append(as_control_dep(inp))
else:
new_inputs.append(inp)
node.ClearField("input")
node.input.extend(new_inputs)
# First, we strip the assert nodes from the graph.
for node in meta_graph_def.graph_def.node:
maybe_do_strip(node)
# Then, we strip the assert nodes from all of the function defs.
for func in meta_graph_def.graph_def.library.function:
for node in func.node_def:
maybe_do_strip(node)
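# Example transformation (illustrative NodeDef fields): an Assert node such as
#
#   op: "Assert"  input: ["condition:0", "^ordering_dep"]
#
# is rewritten in place to
#
#   op: "NoOp"    input: ["^condition", "^ordering_dep"]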
def _fill_meta_graph_def(
meta_graph_def: meta_graph_pb2.MetaGraphDef,
saveable_view: _SaveableView,
signature_functions: Dict[str, Callable[..., Any]],
namespace_whitelist: List[str],
save_custom_gradients: bool,
create_saver: bool,
enable_debug_stripper: bool,
defaults=None,
) -> Tuple[_AssetInfo, ops.Graph]:
"""Generates a MetaGraph which calls `signature_functions`.
Args:
meta_graph_def: The MetaGraphDef proto to fill.
saveable_view: The _SaveableView being exported.
signature_functions: A dictionary mapping signature keys to concrete
functions containing signatures to add to the MetaGraph.
namespace_whitelist: List of strings containing whitelisted op namespaces.
save_custom_gradients: Whether to save custom gradients.
create_saver: Whether to add SavedModel's native save and restore ops.
enable_debug_stripper: Whether to strip the debug nodes from the graph.
defaults: A dictionary mapping signature_key to dictionary of
user_specified_name to Tensor representing default values.
Returns:
A tuple of (_AssetInfo, Graph) containing the captured assets and
exported Graph generated from tracing the saveable_view.
"""
# List objects from the eager context to make sure Optimizers give us the
# right Graph-dependent variables.
resource_initializers = saveable_view.get_concrete_resource_initializers()
exported_graph = ops.Graph()
resource_initializer_ops = []
with exported_graph.as_default():
object_map, tensor_map, asset_info = saveable_view.map_resources()
signatures = _generate_signatures(signature_functions, object_map, defaults)
if save_custom_gradients:
# Custom gradient functions must be traced in the same context as the one
# in which they are registered.
_trace_gradient_functions(exported_graph, saveable_view)
with exported_graph.as_default():
# Create initializers for assets and resources.
for resource_initializer_function in resource_initializers:
asset_dependencies = []
for capture in resource_initializer_function.graph.external_captures:
asset_initializer = asset_info.asset_initializers_by_resource.get(
capture, None)
if asset_initializer is not None:
asset_dependencies.append(asset_initializer)
with ops.control_dependencies(asset_dependencies):
mapped_initializer = object_map[resource_initializer_function]
resource_initializer_ops.append(mapped_initializer())
resource_initializer_ops.extend(
asset_info.asset_initializers_by_resource.values())
with ops.control_dependencies(resource_initializer_ops):
init_op = control_flow_ops.no_op()
# Add the same op to the main_op collection and to the init_op
# signature. The collection is for compatibility with older loader APIs;
# only one will be executed.
meta_graph_def.collection_def[constants.MAIN_OP_KEY].node_list.value.append(
init_op.name)
meta_graph_def.signature_def[constants.INIT_OP_SIGNATURE_KEY].CopyFrom(
signature_def_utils.op_signature_def(init_op,
constants.INIT_OP_SIGNATURE_KEY))
# Saving an object-based checkpoint again gathers variables. We need to do the
# gathering from the eager context so Optimizers save the right set of
# variables, but want any operations associated with the save/restore to be in
# the exported graph (thus the `to_graph` argument).
def call_with_mapped_captures(function, args):
if function in object_map:
return object_map[function](*args)
# Registered saver/restore functions do not appear in `object_map`, because
# they are not in the object graph.
return saved_model_exported_concrete.ExportedConcreteFunction(
function, tensor_map)(*args)
for obj in object_map.values():
obj._maybe_initialize_trackable() # pylint: disable=protected-access
named_saveable_objects, registered_savers = (
save_util_v1.frozen_saveables_and_savers(
graph_view=saveable_view.augmented_graph_view,
object_map=object_map,
to_graph=exported_graph,
call_with_mapped_captures=call_with_mapped_captures))
if create_saver:
saver = functional_saver.MultiDeviceSaver.from_saveables(
named_saveable_objects, registered_savers, call_with_mapped_captures
)
with exported_graph.as_default():
saver_def = saver.to_proto()
meta_graph_def.saver_def.CopyFrom(saver_def)
# At this point all nodes that can be added to the SavedObjectGraph have been
# added, so run the following to validate deserialization dependencies.
_dependency_sorted_node_ids(saveable_view)
graph_def, _ = exported_graph._as_graph_def( # pylint: disable=protected-access
add_shapes=True, use_pybind11_proto=False)
graph_def.library.registered_gradients.extend(saveable_view.gradient_defs)
_verify_ops(graph_def, namespace_whitelist)