Rename type and trait to comply with naming guidelines #33

Merged · 1 commit · Nov 29, 2021
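This PR renames `PQ` to `Pq` and `TrainPQ` to `TrainPq`, following the Rust API naming guidelines, which camel-case acronyms as ordinary words (`Pq`, not `PQ`). For downstream code the change is import-deep only; a minimal sketch of a call site after the rename (the `quantize_all` helper is hypothetical, with parameters borrowed from the benchmarks below):

```rust
// Before this PR: use reductive::pq::{QuantizeVector, TrainPQ, PQ};
use ndarray::Array2;
use reductive::pq::{Pq, QuantizeVector, TrainPq};

fn quantize_all(data: &Array2<f32>) -> Array2<u8> {
    // 16 subquantizers, 4 bits (16 centroids) per subquantizer,
    // 10 k-means iterations, 1 training attempt.
    let pq = Pq::train_pq(16, 4, 10, 1, data.view()).unwrap();
    pq.quantize_batch(data.view())
}
```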
benches/pq.rs (6 changes: 3 additions & 3 deletions)
@@ -7,12 +7,12 @@ use rand_distr::Normal;
use test::Bencher;

use reductive::ndarray_rand::RandomExt;
-use reductive::pq::{QuantizeVector, TrainPQ, PQ};
+use reductive::pq::{QuantizeVector, TrainPq, Pq};

#[bench]
fn pq_quantize(bencher: &mut Bencher) {
let data: Array2<f64> = Array2::random((100, 128), Normal::new(0., 1.).unwrap());
-let pq = PQ::train_pq(16, 4, 10, 1, data.view()).unwrap();
+let pq = Pq::train_pq(16, 4, 10, 1, data.view()).unwrap();

bencher.iter(|| {
for v in data.outer_iter() {
@@ -24,7 +24,7 @@ fn pq_quantize_batch(bencher: &mut Bencher) {
#[bench]
fn pq_quantize_batch(bencher: &mut Bencher) {
let data: Array2<f64> = Array2::random((100, 128), Normal::new(0., 1.).unwrap());
-let pq = PQ::train_pq(16, 4, 10, 1, data.view()).unwrap();
+let pq = Pq::train_pq(16, 4, 10, 1, data.view()).unwrap();

bencher.iter(|| {
let _: Array2<u8> = pq.quantize_batch(data.view());
src/pq/gaussian_opq.rs (16 changes: 8 additions & 8 deletions)
@@ -6,7 +6,7 @@ use ndarray_linalg::types::Scalar;
use num_traits::AsPrimitive;
use rand::{CryptoRng, RngCore, SeedableRng};

-use super::{TrainPQ, OPQ, PQ};
+use super::{Pq, TrainPq, OPQ};

/// Optimized product quantizer for Gaussian variables (Ge et al., 2013).
///
@@ -23,7 +23,7 @@ use super::{TrainPQ, OPQ, PQ};
/// quantization.
pub struct GaussianOPQ;

-impl<A> TrainPQ<A> for GaussianOPQ
+impl<A> TrainPq<A> for GaussianOPQ
where
A: Lapack + NdFloat + Scalar + Sum,
A::Real: NdFloat,
@@ -36,12 +36,12 @@
n_attempts: usize,
instances: ArrayBase<S, Ix2>,
rng: &mut R,
-) -> Result<PQ<A>, rand::Error>
+) -> Result<Pq<A>, rand::Error>
where
S: Sync + Data<Elem = A>,
R: CryptoRng + RngCore + SeedableRng + Send,
{
-PQ::check_quantizer_invariants(
+Pq::check_quantizer_invariants(
n_subquantizers,
n_subquantizer_bits,
n_iterations,
@@ -51,7 +51,7 @@

let projection = OPQ::create_projection_matrix(instances.view(), n_subquantizers);
let rx = instances.dot(&projection);
-let pq = PQ::train_pq_using(
+let pq = Pq::train_pq_using(
n_subquantizers,
n_subquantizer_bits,
n_iterations,
@@ -60,7 +60,7 @@
rng,
)?;

-Ok(PQ {
+Ok(Pq {
projection: Some(projection),
quantizers: pq.quantizers,
})
@@ -77,12 +77,12 @@ mod tests {
use super::GaussianOPQ;
use crate::linalg::EuclideanDistance;
use crate::ndarray_rand::RandomExt;
-use crate::pq::{QuantizeVector, ReconstructVector, TrainPQ, PQ};
+use crate::pq::{Pq, QuantizeVector, ReconstructVector, TrainPq};

/// Calculate the average Euclidean distance between the given
/// instances and the instances returned by quantizing and then
/// reconstructing the instances.
-fn avg_euclidean_loss(instances: ArrayView2<f32>, quantizer: &PQ<f32>) -> f32 {
+fn avg_euclidean_loss(instances: ArrayView2<f32>, quantizer: &Pq<f32>) -> f32 {
let mut euclidean_loss = 0f32;

let quantized: Array2<u8> = quantizer.quantize_batch(instances);
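Because `GaussianOPQ` implements the renamed `TrainPq` trait, training reads exactly like `Pq::train_pq`; as the hunks above show, it first computes a projection matrix, trains the product quantizer on the projected data, and returns a `Pq` carrying that projection. A hedged sketch, assuming `GaussianOPQ` is re-exported from `reductive::pq` like the other trainers and that the crate is built with its LAPACK-backed OPQ support (parameters illustrative):

```rust
use ndarray::Array2;
use reductive::pq::{GaussianOPQ, Pq, TrainPq};

// Same signature as Pq::train_pq; the returned quantizer additionally
// holds the projection matrix learned for the Gaussian OPQ variant.
fn train_gaussian_opq(data: &Array2<f32>) -> Pq<f32> {
    GaussianOPQ::train_pq(16, 4, 10, 1, data.view()).unwrap()
}
```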
src/pq/mod.rs (4 changes: 2 additions & 2 deletions)
@@ -14,7 +14,7 @@ pub(crate) mod primitives;

#[allow(clippy::module_inception)]
mod pq;
-pub use self::pq::PQ;
+pub use self::pq::Pq;

mod traits;
-pub use self::traits::{QuantizeVector, ReconstructVector, TrainPQ};
+pub use self::traits::{QuantizeVector, ReconstructVector, TrainPq};
src/pq/opq.rs (16 changes: 8 additions & 8 deletions)
@@ -18,7 +18,7 @@ use crate::kmeans::KMeansIteration;
use crate::linalg::Covariance;

use super::primitives;
-use super::{TrainPQ, PQ};
+use super::{Pq, TrainPq};

/// Optimized product quantizer (Ge et al., 2013).
///
@@ -37,7 +37,7 @@ use super::{TrainPQ, PQ};
/// no effect.
pub struct OPQ;

-impl<A> TrainPQ<A> for OPQ
+impl<A> TrainPq<A> for OPQ
where
A: Lapack + NdFloat + Scalar + Sum,
A::Real: NdFloat,
@@ -50,12 +50,12 @@
_n_attempts: usize,
instances: ArrayBase<S, Ix2>,
mut rng: &mut R,
-) -> Result<PQ<A>, rand::Error>
+) -> Result<Pq<A>, rand::Error>
where
S: Sync + Data<Elem = A>,
R: RngCore,
{
-PQ::check_quantizer_invariants(
+Pq::check_quantizer_invariants(
n_subquantizers,
n_subquantizer_bits,
n_iterations,
@@ -92,7 +92,7 @@
);
}

-Ok(PQ {
+Ok(Pq {
projection: Some(projection),
quantizers,
})
@@ -147,7 +147,7 @@ impl OPQ {
{
(0..n_subquantizers)
.map(|sq| {
-PQ::subquantizer_initial_centroids(
+Pq::subquantizer_initial_centroids(
sq,
n_subquantizers,
codebook_len,
@@ -282,12 +282,12 @@ mod tests {
use super::OPQ;
use crate::linalg::EuclideanDistance;
use crate::ndarray_rand::RandomExt;
-use crate::pq::{QuantizeVector, ReconstructVector, TrainPQ, PQ};
+use crate::pq::{Pq, QuantizeVector, ReconstructVector, TrainPq};

/// Calculate the average Euclidean distance between the given
/// instances and the instances returned by quantizing and then
/// reconstructing the instances.
-fn avg_euclidean_loss(instances: ArrayView2<f32>, quantizer: &PQ<f32>) -> f32 {
+fn avg_euclidean_loss(instances: ArrayView2<f32>, quantizer: &Pq<f32>) -> f32 {
let mut euclidean_loss = 0f32;

let quantized: Array2<u8> = quantizer.quantize_batch(instances);
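With `Pq`, `OPQ`, and `GaussianOPQ` all implementing the uniformly named `TrainPq`, callers can stay generic over the training strategy. A small sketch (the `train_with` helper is hypothetical, parameters illustrative):

```rust
use ndarray::ArrayView2;
use reductive::pq::{Pq, TrainPq};

// Train through any type implementing TrainPq; which concrete trainer is
// used becomes a one-line choice at the call site.
fn train_with<T: TrainPq<f32>>(data: ArrayView2<f32>) -> Result<Pq<f32>, rand::Error> {
    T::train_pq(16, 4, 10, 1, data)
}

// Usage: train_with::<Pq<f32>>(data.view()), or train_with::<OPQ>(data.view())
// when the OPQ trainers are compiled in.
```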
src/pq/pq.rs (36 changes: 18 additions & 18 deletions)
@@ -12,7 +12,7 @@ use rand::{Rng, RngCore, SeedableRng};
use rayon::prelude::*;

use super::primitives;
-use super::{QuantizeVector, ReconstructVector, TrainPQ};
+use super::{QuantizeVector, ReconstructVector, TrainPq};
use crate::kmeans::{
InitialCentroids, KMeansWithCentroids, NIterationsCondition, RandomInstanceCentroids,
};
@@ -25,12 +25,12 @@ use rand_xorshift::XorShiftRng;
/// *i*-th subquantizer. Vector reconstruction consists of concatenating
/// the centroids that represent the slices.
#[derive(Clone, Debug, PartialEq)]
-pub struct PQ<A> {
+pub struct Pq<A> {
pub(crate) projection: Option<Array2<A>>,
pub(crate) quantizers: Array3<A>,
}

-impl<A> PQ<A>
+impl<A> Pq<A>
where
A: NdFloat,
{
@@ -53,7 +53,7 @@
);
}

-PQ {
+Pq {
projection,
quantizers,
}
@@ -156,7 +156,7 @@
let sq_instances = instances.slice(s![.., offset..offset + sq_dims]);

iter::repeat_with(|| {
-let mut quantizer = PQ::subquantizer_initial_centroids(
+let mut quantizer = Pq::subquantizer_initial_centroids(
subquantizer_idx,
n_subquantizers,
codebook_len,
@@ -183,7 +183,7 @@
}
}

-impl<A> TrainPQ<A> for PQ<A>
+impl<A> TrainPq<A> for Pq<A>
where
A: NdFloat + Sum,
usize: AsPrimitive<A>,
@@ -195,7 +195,7 @@
n_attempts: usize,
instances: ArrayBase<S, Ix2>,
mut rng: &mut R,
-) -> Result<PQ<A>, rand::Error>
+) -> Result<Pq<A>, rand::Error>
where
S: Sync + Data<Elem = A>,
R: RngCore + SeedableRng + Send,
@@ -231,14 +231,14 @@

let views = quantizers.iter().map(|a| a.view()).collect::<Vec<_>>();

-Ok(PQ {
+Ok(Pq {
projection: None,
quantizers: concatenate(Axis(0), &views).expect("Cannot concatenate subquantizers"),
})
}
}

-impl<A> QuantizeVector<A> for PQ<A>
+impl<A> QuantizeVector<A> for Pq<A>
where
A: NdFloat + Sum,
{
@@ -291,7 +291,7 @@
}
}

-impl<A> ReconstructVector<A> for PQ<A>
+impl<A> ReconstructVector<A> for Pq<A>
where
A: NdFloat + Sum,
{
@@ -349,15 +349,15 @@ mod tests {
use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;

-use super::PQ;
+use super::Pq;
use crate::linalg::EuclideanDistance;
use crate::ndarray_rand::RandomExt;
-use crate::pq::{QuantizeVector, ReconstructVector, TrainPQ};
+use crate::pq::{QuantizeVector, ReconstructVector, TrainPq};

/// Calculate the average Euclidean distance between the given
/// instances and the instances returned by quantizing and then
/// reconstructing the instances.
-fn avg_euclidean_loss(instances: ArrayView2<f32>, quantizer: &PQ<f32>) -> f32 {
+fn avg_euclidean_loss(instances: ArrayView2<f32>, quantizer: &Pq<f32>) -> f32 {
let mut euclidean_loss = 0f32;

let quantized: Array2<u8> = quantizer.quantize_batch(instances);
@@ -392,10 +392,10 @@
]
}

-fn test_pq() -> PQ<f32> {
+fn test_pq() -> Pq<f32> {
let quantizers = array![[[1., 0., 0.], [0., 1., 0.]], [[1., -1., 0.], [0., 1., 0.]],];

-PQ {
+Pq {
projection: None,
quantizers,
}
@@ -428,7 +428,7 @@
let mut rng = ChaCha8Rng::seed_from_u64(42);
let uniform = Uniform::new(0f32, 1f32);
let instances = Array2::random_using((256, 20), uniform, &mut rng);
-let pq = PQ::train_pq_using(10, 7, 10, 1, instances.view(), &mut rng).unwrap();
+let pq = Pq::train_pq_using(10, 7, 10, 1, instances.view(), &mut rng).unwrap();
let loss = avg_euclidean_loss(instances.view(), &pq);
// Loss is around 0.077.
assert!(loss < 0.08);
@@ -437,7 +437,7 @@
#[test]
fn quantize_with_type() {
let uniform = Uniform::new(0f32, 1f32);
-let pq = PQ {
+let pq = Pq {
projection: None,
quantizers: Array3::random((1, 256, 10), uniform),
};
@@ -448,7 +448,7 @@
#[should_panic]
fn quantize_with_too_narrow_type() {
let uniform = Uniform::new(0f32, 1f32);
-let pq = PQ {
+let pq = Pq {
projection: None,
quantizers: Array3::random((1, 257, 10), uniform),
};
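The tests above gauge quantizer quality through a quantize/reconstruct round trip. A condensed sketch of the same idea using the renamed type; it assumes `reconstruct_batch` from the `ReconstructVector` trait imported in these tests, and the mean squared error here is a simpler stand-in for the tests' average Euclidean loss:

```rust
use ndarray::{Array2, ArrayView2};
use reductive::pq::{Pq, QuantizeVector, ReconstructVector};

// Quantize all instances to u8 codes, reconstruct them from the codebooks,
// and report the mean squared reconstruction error.
fn reconstruction_mse(instances: ArrayView2<f32>, quantizer: &Pq<f32>) -> f32 {
    let codes: Array2<u8> = quantizer.quantize_batch(instances);
    let reconstructed = quantizer.reconstruct_batch(codes.view());
    let diff = &instances.to_owned() - &reconstructed;
    diff.mapv(|d| d * d).mean().unwrap_or(0.0)
}
```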
src/pq/traits.rs (8 changes: 4 additions & 4 deletions)
@@ -3,13 +3,13 @@ use num_traits::{AsPrimitive, Bounded, Zero};
use rand::{CryptoRng, RngCore, SeedableRng};
use rand_chacha::ChaCha8Rng;

-use crate::pq::PQ;
+use crate::pq::Pq;

/// Training trait for product quantizers.
///
/// This trait specifies the training functions for product
/// quantizers.
-pub trait TrainPQ<A> {
+pub trait TrainPq<A> {
/// Train a product quantizer with the xorshift PRNG.
///
/// Train a product quantizer with `n_subquantizers` subquantizers
@@ -23,7 +23,7 @@ pub trait TrainPQ<A> {
n_iterations: usize,
n_attempts: usize,
instances: ArrayBase<S, Ix2>,
-) -> Result<PQ<A>, rand::Error>
+) -> Result<Pq<A>, rand::Error>
where
S: Sync + Data<Elem = A>,
{
@@ -56,7 +56,7 @@ pub trait TrainPQ<A> {
n_attempts: usize,
instances: ArrayBase<S, Ix2>,
rng: &mut R,
-) -> Result<PQ<A>, rand::Error>
+) -> Result<Pq<A>, rand::Error>
where
S: Sync + Data<Elem = A>,
R: CryptoRng + RngCore + SeedableRng + Send;
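The default `train_pq` seeds its own PRNG; for reproducible training, pass a seeded RNG to `train_pq_using`, as the unit test in src/pq/pq.rs does. A sketch mirroring that test (the exact RNG and ndarray-rand version pairing is assumed to match the crate's):

```rust
use ndarray::Array2;
use rand::distributions::Uniform;
use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;
use reductive::ndarray_rand::RandomExt;
use reductive::pq::{Pq, QuantizeVector, TrainPq};

fn main() -> Result<(), rand::Error> {
    // A fixed seed makes both the data and the trained quantizer deterministic.
    let mut rng = ChaCha8Rng::seed_from_u64(42);
    let instances = Array2::random_using((256, 20), Uniform::new(0f32, 1f32), &mut rng);

    // 10 subquantizers, 7 bits (128 centroids) each, 10 iterations, 1 attempt.
    let pq = Pq::train_pq_using(10, 7, 10, 1, instances.view(), &mut rng)?;

    // One u8 code per subquantizer per instance.
    let codes: Array2<u8> = pq.quantize_batch(instances.view());
    assert_eq!(codes.dim(), (256, 10));
    Ok(())
}
```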