From b5981a40023ca6ca6453e7b66abf8238ef98d7f9 Mon Sep 17 00:00:00 2001 From: Peng Chen Date: Wed, 3 May 2023 20:54:02 -0700 Subject: [PATCH] add logging for internal adoption tracking (#426) Summary: Pull Request resolved: https://github.com/facebookresearch/multimodal/pull/426 add adoption logging for the models with the most internal usage. To avoid duplicate counts, we only enable logging for model entrypoints. This diff includes: vision transformer, MLP, av_concat_fusion, contrastive loss and transformer fusion. CLIP logging has been enabled previously. Reviewed By: ankitade Differential Revision: D45532797 fbshipit-source-id: 7e1a2a56a99bc0fe180c1103402003dc1ad5cbe0 --- torchmultimodal/modules/layers/mlp.py | 2 +- .../modules/losses/contrastive_loss_with_temperature.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/torchmultimodal/modules/layers/mlp.py b/torchmultimodal/modules/layers/mlp.py index 72dab34a..36a4badf 100644 --- a/torchmultimodal/modules/layers/mlp.py +++ b/torchmultimodal/modules/layers/mlp.py @@ -42,7 +42,7 @@ def __init__( normalization: Optional[Callable[..., nn.Module]] = None, ) -> None: super().__init__() - + torch._C._log_api_usage_once(f"torchmultimodal.{self.__class__.__name__}") layers = nn.ModuleList() if hidden_dims is None: diff --git a/torchmultimodal/modules/losses/contrastive_loss_with_temperature.py b/torchmultimodal/modules/losses/contrastive_loss_with_temperature.py index e84a88b2..ecfe2488 100644 --- a/torchmultimodal/modules/losses/contrastive_loss_with_temperature.py +++ b/torchmultimodal/modules/losses/contrastive_loss_with_temperature.py @@ -165,6 +165,7 @@ def __init__( logit_scale_max: Optional[float] = math.log(100), ): super().__init__() + torch._C._log_api_usage_once(f"torchmultimodal.{self.__class__.__name__}") if not logit_scale_min and not logit_scale_max: raise ValueError(