Skip to content

Commit

Permalink
Fix for forward passing
Browse files Browse the repository at this point in the history
  • Loading branch information
knowicki-nvidia committed Jun 19, 2024
1 parent e30ecb7 commit b8c265a
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 3 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ limitations under the License.
- change: Improved handling defaults for ONNX Dynamo when executing `nav.package.optimize`
- fix: Maintaining modules device in `nav.profile()`
- fix: Add support for all precisions for TensorRT in `nav.profile()`
- fix: Forward method not passed to other inplace modules.

[//]: <> (put here on external component update with short summary what change or link to changelog)

Expand Down
2 changes: 1 addition & 1 deletion model_navigator/inplace/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def __init__(
self._input_mapping = input_mapping
self._output_mapping = output_mapping
self._device = device
self._forward_call = forward or self._module
self._forward_call = forward if forward is not None else self._module
if optimize_config:
self.optimize_config = self._update_optimize_config(optimize_config)
else:
Expand Down
7 changes: 5 additions & 2 deletions model_navigator/inplace/wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def __init__(
current_forward = getattr(module, forward_func)
except AttributeError as e:
raise ModelNavigatorUserInputError(f"Forward method must exist, got {forward_func}.") from e
setattr(module, forward_func, self.__call__)
setattr(module, forward_func, lambda *args, **kwargs: Module.__call__(self, *args, **kwargs))

self.batching = batching
self.precision = precision
Expand Down Expand Up @@ -216,6 +216,7 @@ def load_optimized(
strategy=strategy,
activate_runners=activate_runners,
device=str(device),
forward=self._wrapper._forward_call,
)

def load_recorded(self) -> None:
Expand All @@ -226,6 +227,7 @@ def load_recorded(self) -> None:
input_mapping=self._input_mapping,
output_mapping=self._output_mapping,
optimize_config=self._optimize_config,
forward=self._wrapper._forward_call,
)

def load_eager(self, device: Optional[str] = None) -> None:
Expand All @@ -236,6 +238,7 @@ def load_eager(self, device: Optional[str] = None) -> None:
input_mapping=self._input_mapping,
output_mapping=self._output_mapping,
optimize_config=self._optimize_config,
forward=self._wrapper._forward_call,
)
device = device or self._device
self._wrapper.module.to(device)
Expand Down Expand Up @@ -269,7 +272,7 @@ def module(
precision: precision of the module
Note:
batching if specified takes precedence over corresponding values in the
batching if specified takes precedence over corresponding values in the
configuration specified in nav.profile.
Example:
Expand Down

0 comments on commit b8c265a

Please sign in to comment.