Standardize (1|2)(d|D) into (1|2)d
EricLBuehler committed Sep 10, 2023
1 parent 005d4f6 commit f4733cc
Showing 6 changed files with 41 additions and 41 deletions.
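This commit only changes identifier casing: LoraConv1D, LoraConv2D, LoraConv1DConfig, LoraConv2DConfig, Conv1DWithWB, and Conv2DWithWB become LoraConv1d, LoraConv2d, LoraConv1dConfig, LoraConv2dConfig, Conv1dWithWB, and Conv2dWithWB; the hunks below show no behavioral edits. For orientation, here is a minimal sketch of caller code written against the new casing, assembled from the tests in this diff. The helper name select_layers, the usize key type, the empty maps, and the F32 dtype are placeholder choices for illustration only; the config arguments follow the (device, dtype, kernel_size, in_channels, out_channels) order used in tests/conv1d.rs below.

```rust
use std::collections::HashMap;

use candle_core::{DType, Device};
use candle_lora::{loraconv1d::LoraConv1dConfig, SelectedLayers};

// Build a SelectedLayers value with the renamed Conv1d config type, mirroring
// the conv1d test in this commit. The maps are left empty purely to keep the
// sketch short; real code would insert &dyn *LayerLike references to the
// layers that should gain LoRA adapters.
fn select_layers(device: &Device) -> SelectedLayers<'_, usize> {
    SelectedLayers {
        linear: HashMap::new(),
        linear_config: None,
        conv1d: HashMap::new(),
        conv1d_config: Some(LoraConv1dConfig::default(device, DType::F32, 1, 10, 10)),
        conv2d: HashMap::new(),
        conv2d_config: None,
    }
}
```

A SelectedLayers built this way is what the impl Lora code in src/lib.rs consumes when it fills the NewLayers maps with LoraConv1d and LoraConv2d values.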
28 changes: 14 additions & 14 deletions src/lib.rs
@@ -2,8 +2,8 @@
#[doc = include_str!("../README.md")]
use candle_core::{Shape, Tensor};
use candle_nn::{Conv1d, Conv1dConfig, Conv2d, Conv2dConfig, Linear, Module};
-use loraconv1d::{LoraConv1D, LoraConv1DConfig};
-use loraconv2d::{LoraConv2D, LoraConv2DConfig};
+use loraconv1d::{LoraConv1d, LoraConv1dConfig};
+use loraconv2d::{LoraConv2d, LoraConv2dConfig};
use loralinear::{LoraLinear, LoraLinearConfig};
use std::{collections::HashMap, hash::Hash};

@@ -35,14 +35,14 @@ impl Lora {
for (name, layer) in selected.conv1d {
new.conv1d.insert(
name,
-                LoraConv1D::new(layer, selected.conv1d_config.as_ref().unwrap()).unwrap(),
+                LoraConv1d::new(layer, selected.conv1d_config.as_ref().unwrap()).unwrap(),
);
}

for (name, layer) in selected.conv2d {
new.conv2d.insert(
name,
-                LoraConv2D::new(layer, selected.conv2d_config.as_ref().unwrap()).unwrap(),
+                LoraConv2d::new(layer, selected.conv2d_config.as_ref().unwrap()).unwrap(),
);
}

@@ -54,15 +54,15 @@ pub struct SelectedLayers<'a, T: Eq + PartialEq + Hash> {
pub linear: HashMap<T, &'a dyn LinearLayerLike>,
pub linear_config: Option<LoraLinearConfig<'a>>,
pub conv1d: HashMap<T, &'a dyn Conv1dLayerLike>,
-    pub conv1d_config: Option<LoraConv1DConfig<'a>>,
+    pub conv1d_config: Option<LoraConv1dConfig<'a>>,
pub conv2d: HashMap<T, &'a dyn Conv2dLayerLike>,
-    pub conv2d_config: Option<LoraConv2DConfig<'a>>,
+    pub conv2d_config: Option<LoraConv2dConfig<'a>>,
}

pub struct NewLayers<T: Eq + PartialEq + Hash> {
pub linear: HashMap<T, LoraLinear>,
-    pub conv1d: HashMap<T, LoraConv1D>,
-    pub conv2d: HashMap<T, LoraConv2D>,
+    pub conv1d: HashMap<T, LoraConv1d>,
+    pub conv2d: HashMap<T, LoraConv2d>,
}

pub trait LinearLayerLike: Module {
@@ -90,19 +90,19 @@ pub trait Conv1dLayerLike: Module {
}

#[derive(Debug)]
-pub struct Conv1DWithWB {
+pub struct Conv1dWithWB {
pub this: Conv1d,
pub weights: Tensor,
pub bias: Option<Tensor>,
}

-impl Module for Conv1DWithWB {
+impl Module for Conv1dWithWB {
fn forward(&self, xs: &Tensor) -> candle_core::Result<Tensor> {
self.this.forward(xs)
}
}

-impl Conv1dLayerLike for Conv1DWithWB {
+impl Conv1dLayerLike for Conv1dWithWB {
fn config(&self) -> &Conv1dConfig {
self.this.config()
}
@@ -121,19 +121,19 @@ pub trait Conv2dLayerLike: Module {
}

#[derive(Debug)]
-pub struct Conv2DWithWB {
+pub struct Conv2dWithWB {
pub this: Conv2d,
pub weights: Tensor,
pub bias: Option<Tensor>,
}

-impl Module for Conv2DWithWB {
+impl Module for Conv2dWithWB {
fn forward(&self, xs: &Tensor) -> candle_core::Result<Tensor> {
self.this.forward(xs)
}
}

-impl Conv2dLayerLike for Conv2DWithWB {
+impl Conv2dLayerLike for Conv2dWithWB {
fn config(&self) -> &Conv2dConfig {
self.this.config()
}
18 changes: 9 additions & 9 deletions src/loraconv1d.rs
@@ -6,14 +6,14 @@ use candle_nn::{init, Conv1dConfig, VarMap};
use crate::{frozenconv::FrozenConv1d, Conv1dLayerLike};

#[derive(Debug)]
-pub struct LoraConv1D {
+pub struct LoraConv1d {
old: FrozenConv1d,
a: Tensor,
b: Tensor,
scale: Option<f64>,
}

-pub struct LoraConv1DConfig<'a> {
+pub struct LoraConv1dConfig<'a> {
pub rank: usize,
pub alpha: f64,
pub kernel_size: usize,
@@ -23,15 +23,15 @@ pub struct LoraConv1DConfig<'a> {
pub out_channels: usize,
}

-impl<'a> LoraConv1DConfig<'a> {
+impl<'a> LoraConv1dConfig<'a> {
pub fn default(
device: &'a Device,
dtype: DType,
kernel_size: usize,
in_channels: usize,
out_channels: usize,
) -> Self {
-        LoraConv1DConfig {
+        LoraConv1dConfig {
rank: 1,
alpha: 1.,
kernel_size,
@@ -43,8 +43,8 @@ impl<'a> LoraConv1DConfig<'a> {
}
}

-impl LoraConv1D {
-    pub fn new(old: &dyn Conv1dLayerLike, config: &LoraConv1DConfig) -> Result<Self> {
+impl LoraConv1d {
+    pub fn new(old: &dyn Conv1dLayerLike, config: &LoraConv1dConfig) -> Result<Self> {
let map = VarMap::new();
let a = map.get(
(
@@ -67,7 +67,7 @@ impl LoraConv1D {
config.device,
)?;

-        Ok(LoraConv1D {
+        Ok(LoraConv1d {
old: FrozenConv1d::new_from_conv1d(old)?,
a,
b,
@@ -80,7 +80,7 @@ impl LoraConv1D {
}
}

-impl Module for LoraConv1D {
+impl Module for LoraConv1d {
fn forward(&self, input: &Tensor) -> Result<Tensor> {
if let Some(scale) = self.scale {
let x = input;
@@ -113,7 +113,7 @@ impl Module for LoraConv1D {
}
}

-impl Conv1dLayerLike for LoraConv1D {
+impl Conv1dLayerLike for LoraConv1d {
fn config(&self) -> &Conv1dConfig {
self.old.config()
}
18 changes: 9 additions & 9 deletions src/loraconv2d.rs
@@ -6,14 +6,14 @@ use candle_nn::{init, Conv2dConfig, VarMap};
use crate::{frozenconv::FrozenConv2d, Conv2dLayerLike};

#[derive(Debug)]
-pub struct LoraConv2D {
+pub struct LoraConv2d {
old: FrozenConv2d,
a: Tensor,
b: Tensor,
scale: Option<f64>,
}

-pub struct LoraConv2DConfig<'a> {
+pub struct LoraConv2dConfig<'a> {
pub rank: usize,
pub alpha: f64,
pub kernel_size: usize,
@@ -23,15 +23,15 @@ pub struct LoraConv2DConfig<'a> {
out_channels: usize,
}

-impl<'a> LoraConv2DConfig<'a> {
+impl<'a> LoraConv2dConfig<'a> {
pub fn default(
device: &'a Device,
dtype: DType,
kernel_size: usize,
in_channels: usize,
out_channels: usize,
) -> Self {
-        LoraConv2DConfig {
+        LoraConv2dConfig {
rank: 1,
alpha: 1.,
kernel_size,
@@ -43,8 +43,8 @@ impl<'a> LoraConv2DConfig<'a> {
}
}

-impl LoraConv2D {
-    pub fn new(old: &dyn Conv2dLayerLike, config: &LoraConv2DConfig) -> Result<Self> {
+impl LoraConv2d {
+    pub fn new(old: &dyn Conv2dLayerLike, config: &LoraConv2dConfig) -> Result<Self> {
let map = VarMap::new();
let a = map.get(
(
@@ -67,7 +67,7 @@ impl LoraConv2D {
config.device,
)?;

-        Ok(LoraConv2D {
+        Ok(LoraConv2d {
old: FrozenConv2d::new_from_conv2d(old)?,
a,
b,
@@ -80,7 +80,7 @@ impl LoraConv2D {
}
}

-impl Module for LoraConv2D {
+impl Module for LoraConv2d {
fn forward(&self, input: &Tensor) -> Result<Tensor> {
if let Some(scale) = self.scale {
let x = input;
@@ -112,7 +112,7 @@ impl Module for LoraConv2D {
}
}

-impl Conv2dLayerLike for LoraConv2D {
+impl Conv2dLayerLike for LoraConv2d {
fn config(&self) -> &Conv2dConfig {
self.old.config()
}
6 changes: 3 additions & 3 deletions src/main.rs
@@ -2,7 +2,7 @@ use std::{collections::HashMap, hash::Hash};

use candle_core::{DType, Device, Result, Tensor};
use candle_lora::{
-    loraconv2d::LoraConv2DConfig, Conv2DWithWB, Conv2dLayerLike, Lora, NewLayers, SelectedLayers,
+    loraconv2d::LoraConv2dConfig, Conv2dLayerLike, Conv2dWithWB, Lora, NewLayers, SelectedLayers,
};
use candle_nn::{init, Conv2d, Conv2dConfig, Module, VarMap};

@@ -64,7 +64,7 @@ fn main() -> Result<()> {
&device,
)?;

-    let conv = Conv2DWithWB {
+    let conv = Conv2dWithWB {
this: Conv2d::new(conv_weight.clone(), Some(conv_bias.clone()), cfg),
weights: conv_weight,
bias: Some(conv_bias),
@@ -92,7 +92,7 @@ fn main() -> Result<()> {
conv1d: conv1d_layers,
conv1d_config: None,
conv2d: conv2d_layers,
-        conv2d_config: Some(LoraConv2DConfig::default(
+        conv2d_config: Some(LoraConv2dConfig::default(
&device,
dtype,
kernel,
6 changes: 3 additions & 3 deletions tests/conv1d.rs
@@ -4,7 +4,7 @@ fn conv1d() -> candle_core::Result<()> {

use candle_core::{DType, Device, Result, Tensor};
use candle_lora::{
-        loraconv1d::LoraConv1DConfig, Conv1DWithWB, Conv1dLayerLike, Lora, NewLayers,
+        loraconv1d::LoraConv1dConfig, Conv1dLayerLike, Conv1dWithWB, Lora, NewLayers,
SelectedLayers,
};
use candle_nn::{init, Conv1d, Conv1dConfig, Module, VarMap};
@@ -55,7 +55,7 @@ fn conv1d() -> candle_core::Result<()> {
&device,
)?;

-    let conv = Conv1DWithWB {
+    let conv = Conv1dWithWB {
this: Conv1d::new(
conv_weight.clone(),
Some(conv_bias.clone()),
@@ -84,7 +84,7 @@ fn conv1d() -> candle_core::Result<()> {
linear: linear_layers,
linear_config: None,
conv1d: conv1d_layers,
-        conv1d_config: Some(LoraConv1DConfig::default(&device, dtype, 1, 10, 10)),
+        conv1d_config: Some(LoraConv1dConfig::default(&device, dtype, 1, 10, 10)),
conv2d: conv2d_layers,
conv2d_config: None,
};
6 changes: 3 additions & 3 deletions tests/conv2d.rs
@@ -3,7 +3,7 @@ fn main() -> candle_core::Result<()> {

use candle_core::{DType, Device, Result, Tensor};
use candle_lora::{
-        loraconv2d::LoraConv2DConfig, Conv2DWithWB, Conv2dLayerLike, Lora, NewLayers,
+        loraconv2d::LoraConv2dConfig, Conv2dLayerLike, Conv2dWithWB, Lora, NewLayers,
SelectedLayers,
};
use candle_nn::{init, Conv2d, Conv2dConfig, Module, VarMap};
@@ -65,7 +65,7 @@ fn main() -> candle_core::Result<()> {
&device,
)?;

-    let conv = Conv2DWithWB {
+    let conv = Conv2dWithWB {
this: Conv2d::new(conv_weight.clone(), Some(conv_bias.clone()), cfg),
weights: conv_weight,
bias: Some(conv_bias),
@@ -93,7 +93,7 @@ fn main() -> candle_core::Result<()> {
conv1d: conv1d_layers,
conv1d_config: None,
conv2d: conv2d_layers,
-        conv2d_config: Some(LoraConv2DConfig::default(
+        conv2d_config: Some(LoraConv2dConfig::default(
&device,
dtype,
kernel,
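Taken together with the conv2d test above, the renamed wrapper and adapter types compose as in the following sketch. The helper name lora_wrap, the zero-initialized tensors, and the concrete sizes (4 output channels, 3 input channels, 3x3 kernel) are arbitrary placeholders for illustration, not code from this commit; the calls themselves (Conv2d::new, the Conv2dWithWB fields, LoraConv2dConfig::default, and LoraConv2d::new) follow the signatures visible in the diffs above.

```rust
use candle_core::{DType, Device, Result, Tensor};
use candle_lora::{
    loraconv2d::{LoraConv2d, LoraConv2dConfig},
    Conv2dWithWB,
};
use candle_nn::{Conv2d, Conv2dConfig};

// Wrap a plain candle_nn::Conv2d in the renamed Conv2dWithWB adapter, then
// turn it into a LoraConv2d. The weight tensor uses candle's
// (out_channels, in_channels, k, k) layout; zeros are used only for brevity.
fn lora_wrap(device: &Device) -> Result<LoraConv2d> {
    let weight = Tensor::zeros((4, 3, 3, 3), DType::F32, device)?;
    let bias = Tensor::zeros(4, DType::F32, device)?;
    let conv = Conv2dWithWB {
        this: Conv2d::new(weight.clone(), Some(bias.clone()), Conv2dConfig::default()),
        weights: weight,
        bias: Some(bias),
    };
    let config = LoraConv2dConfig::default(device, DType::F32, 3, 3, 4);
    LoraConv2d::new(&conv, &config)
}
```

The returned value implements Module and Conv2dLayerLike, as shown in the src/loraconv2d.rs hunks above.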
