Replace opaque return types in optim (#1767)
* update ARCHITECTURE.md links to project architecture section in contributor book

* replace opaque return type in optim
benbaarber committed May 14, 2024
1 parent e4d0cf3 commit d3cd6c4
Showing 3 changed files with 12 additions and 6 deletions.
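
For context on the change itself: an `impl Trait` return type keeps the concrete optimizer type opaque, so downstream code cannot name it in struct fields or type aliases. A minimal, self-contained sketch of the difference (hypothetical `Optimizer`, `Adaptor`, and `Trainer` names for illustration only, not the crate's actual API):

// A stand-in trait and type, for illustration only.
trait Optimizer {
    fn step(&mut self);
}

struct Adaptor;

impl Optimizer for Adaptor {
    fn step(&mut self) {}
}

// Opaque: callers only learn "some type implementing Optimizer" and cannot
// write that type down anywhere.
fn init_opaque() -> impl Optimizer {
    Adaptor
}

// Concrete: the full type is part of the signature, so the caller can use it
// in struct fields and type aliases.
fn init_concrete() -> Adaptor {
    Adaptor
}

// Only possible when the return type is concrete.
struct Trainer {
    optim: Adaptor,
}
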
6 changes: 4 additions & 2 deletions crates/burn-core/src/optim/adagrad.rs
@@ -5,7 +5,7 @@ use crate::{
 
 use super::{
     decay::{WeightDecay, WeightDecayConfig},
-    Optimizer, SimpleOptimizer,
+    SimpleOptimizer,
 };
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
@@ -79,7 +79,9 @@ impl AdaGradConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<AdaGrad<B::InnerBackend>, M, B> {
         let optim = AdaGrad {
             lr_decay: LrDecay {
                 lr_decay: self.lr_decay,
6 changes: 4 additions & 2 deletions crates/burn-core/src/optim/adam.rs
@@ -5,7 +5,7 @@ use crate::{
 
 use super::{
     decay::{WeightDecay, WeightDecayConfig},
-    Optimizer, SimpleOptimizer,
+    SimpleOptimizer,
 };
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
@@ -85,7 +85,9 @@ impl AdamConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<Adam<B::InnerBackend>, M, B> {
         let optim = Adam {
             momentum: AdaptiveMomentum {
                 beta_1: self.beta_1,
6 changes: 4 additions & 2 deletions crates/burn-core/src/optim/adamw.rs
@@ -4,7 +4,7 @@ use crate::{
 };
 use std::marker::PhantomData;
 
-use super::{Optimizer, SimpleOptimizer};
+use super::SimpleOptimizer;
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
 use crate::tensor::{backend::AutodiffBackend, Tensor};
@@ -83,7 +83,9 @@ impl AdamWConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<AdamW<B::InnerBackend>, M, B> {
         let optim = AdamW {
             momentum: AdaptiveMomentumW {
                 beta_1: self.beta_1,
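
Taken together, the three `init` methods now expose the concrete `OptimizerAdaptor` type. A minimal usage sketch of what this enables downstream (the `burn::optim::adaptor::OptimizerAdaptor` re-export path and the `TrainState` type are assumptions for illustration, not part of this commit):

use burn::module::AutodiffModule;
use burn::optim::adaptor::OptimizerAdaptor;
use burn::optim::{Adam, AdamConfig};
use burn::tensor::backend::AutodiffBackend;

// With the concrete return type, the optimizer can live in a struct field;
// the previous `impl Optimizer<M, B>` return type could not be named here.
struct TrainState<B: AutodiffBackend, M: AutodiffModule<B>> {
    model: M,
    optim: OptimizerAdaptor<Adam<B::InnerBackend>, M, B>,
}

impl<B: AutodiffBackend, M: AutodiffModule<B>> TrainState<B, M> {
    fn new(model: M) -> Self {
        // `AdamConfig::init` returns the adaptor type shown in the diff above.
        let optim = AdamConfig::new().init::<B, M>();
        Self { model, optim }
    }
}
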
