diff --git a/tltorch/factorized_layers/factorized_convolution.py b/tltorch/factorized_layers/factorized_convolution.py
index f9b76a3..6102037 100644
--- a/tltorch/factorized_layers/factorized_convolution.py
+++ b/tltorch/factorized_layers/factorized_convolution.py
@@ -61,10 +61,10 @@ def _ensure_array(layers_shape, order, value, one_per_order=True):
         return value
 
     if isinstance(value, int):
-        array = np.ones(target_shape, dtype=np.int)*value
+        array = np.ones(target_shape, dtype=np.int32)*value
     else:
         assert len(value) == order
-        array = np.ones(target_shape, dtype=np.int)
+        array = np.ones(target_shape, dtype=np.int32)
         array[..., :] = value
 
     return array
diff --git a/tltorch/factorized_tensors/core.py b/tltorch/factorized_tensors/core.py
index 50b298f..e65e1d7 100644
--- a/tltorch/factorized_tensors/core.py
+++ b/tltorch/factorized_tensors/core.py
@@ -142,7 +142,8 @@ def __init_subclass__(cls, name, **kwargs):
             cls._factorizations[name.lower()] = cls
             cls._name = name
         else:
-            warnings.warn(f'Creating a subclass of FactorizedTensor {cls.__name__} with no name.')
+            if cls.__name__ != "TensorizedTensor":  # Don't display the warning when defining the TensorizedTensor class itself
+                warnings.warn(f'Creating a subclass of FactorizedTensor {cls.__name__} with no name.')
 
     def __new__(cls, *args, **kwargs):
         """Customize the creation of a factorized convolution
@@ -515,7 +516,7 @@ def to_matrix(self):
         If several matrices are parametrized, a batch of matrices is
         returned
         """
-        warnings.warn(f'{self} is being reconstructed into a matrix, consider operating on the decomposed form.')
+        # warnings.warn(f'{self} is being reconstructed into a matrix, consider operating on the decomposed form.')
 
         return self.to_tensor().reshape(self.shape)
 
diff --git a/tltorch/functional/factorized_linear.py b/tltorch/functional/factorized_linear.py
index 547b875..1220ed2 100644
--- a/tltorch/functional/factorized_linear.py
+++ b/tltorch/functional/factorized_linear.py
@@ -4,7 +4,7 @@
 
 # Author: Jean Kossaifi
 
-def linear_tucker(tensor, tucker_matrix, transpose=True):
+def linear_tucker(tensor, tucker_matrix, transpose=True, channels_first=True):
     if transpose:
         contraction_axis = 1
     else:
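
For context on the dtype change in _ensure_array: the np.int alias was deprecated in NumPy 1.20 and removed in NumPy 1.24, so the old calls raise AttributeError on recent NumPy releases. Below is a minimal standalone sketch of the before/after behavior; the target_shape and fill value are made up purely for illustration and are not part of the patch.

import numpy as np

target_shape = (2, 3)  # hypothetical shape, for illustration only

# Before the patch (fails with AttributeError on NumPy >= 1.24, where the
# deprecated np.int alias of the builtin int has been removed):
# array = np.ones(target_shape, dtype=np.int) * 5

# After the patch: an explicit fixed-width dtype works across NumPy versions
array = np.ones(target_shape, dtype=np.int32) * 5
print(array.dtype)  # int32

One caveat: np.int resolved to the builtin int, i.e. the platform's default integer (typically 64-bit on Linux), so np.int32 narrows the dtype slightly; using plain int instead would preserve the old default width.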