diff --git a/crates/burn-core/src/optim/adagrad.rs b/crates/burn-core/src/optim/adagrad.rs
index f05883ce6a..658932b751 100644
--- a/crates/burn-core/src/optim/adagrad.rs
+++ b/crates/burn-core/src/optim/adagrad.rs
@@ -5,7 +5,7 @@ use crate::{
 
 use super::{
     decay::{WeightDecay, WeightDecayConfig},
-    Optimizer, SimpleOptimizer,
+    SimpleOptimizer,
 };
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
@@ -79,7 +79,9 @@ impl AdaGradConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<AdaGrad<B::InnerBackend>, M, B> {
         let optim = AdaGrad {
             lr_decay: LrDecay {
                 lr_decay: self.lr_decay,
diff --git a/crates/burn-core/src/optim/adam.rs b/crates/burn-core/src/optim/adam.rs
index 6c3f56e856..bf6db8e183 100644
--- a/crates/burn-core/src/optim/adam.rs
+++ b/crates/burn-core/src/optim/adam.rs
@@ -5,7 +5,7 @@ use crate::{
 
 use super::{
     decay::{WeightDecay, WeightDecayConfig},
-    Optimizer, SimpleOptimizer,
+    SimpleOptimizer,
 };
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
@@ -85,7 +85,9 @@ impl AdamConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<Adam<B::InnerBackend>, M, B> {
         let optim = Adam {
             momentum: AdaptiveMomentum {
                 beta_1: self.beta_1,
diff --git a/crates/burn-core/src/optim/adamw.rs b/crates/burn-core/src/optim/adamw.rs
index 4c25d20831..83a7a44f66 100644
--- a/crates/burn-core/src/optim/adamw.rs
+++ b/crates/burn-core/src/optim/adamw.rs
@@ -4,7 +4,7 @@ use crate::{
 };
 use std::marker::PhantomData;
 
-use super::{Optimizer, SimpleOptimizer};
+use super::SimpleOptimizer;
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
 use crate::tensor::{backend::AutodiffBackend, Tensor};
@@ -83,7 +83,9 @@ impl AdamWConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<AdamW<B::InnerBackend>, M, B> {
         let optim = AdamW {
             momentum: AdaptiveMomentumW {
                 beta_1: self.beta_1,
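
Note (not part of the diff): switching the return type from the opaque `impl Optimizer<M, B>` to the concrete `OptimizerAdaptor<..., M, B>` lets callers write the optimizer's type out, for example to keep it in a struct field or a type alias, which an `impl Trait` return does not allow. Below is a minimal sketch of that use, assuming burn publicly re-exports `AdamConfig`, `Adam`, and `OptimizerAdaptor` under `burn::optim`; the `TrainerState` type and its `new` constructor are hypothetical names for illustration:

```rust
use burn::module::AutodiffModule;
use burn::optim::adaptor::OptimizerAdaptor;
use burn::optim::{Adam, AdamConfig};
use burn::tensor::backend::AutodiffBackend;

// Hypothetical holder type: with the concrete return type, the optimizer can
// be stored in a named field; an `impl Optimizer<M, B>` return could not be
// spelled out here.
struct TrainerState<B: AutodiffBackend, M: AutodiffModule<B>> {
    optim: OptimizerAdaptor<Adam<B::InnerBackend>, M, B>,
}

impl<B: AutodiffBackend, M: AutodiffModule<B>> TrainerState<B, M> {
    fn new(config: &AdamConfig) -> Self {
        Self {
            // `init` now returns exactly the type named in the field above,
            // so no boxing or type erasure is needed.
            optim: config.init(),
        }
    }
}
```

Since `OptimizerAdaptor` still implements `Optimizer<M, B>`, existing callers that only used the trait methods should be unaffected by this change.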