Skip to content

Commit

Permalink
Move rnd_chars to utils (#79)
Browse files Browse the repository at this point in the history
* Move rnd_chars to utils

* Remove whitespace

* Update CHANGELOG

* Fix clippy warnings

* Remove unwanted changes
  • Loading branch information
evgeniy-r committed Aug 2, 2021
1 parent d65c0b3 commit 24eb27b
Show file tree
Hide file tree
Showing 11 changed files with 70 additions and 59 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Expand Up @@ -19,6 +19,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
([@evgeniy-r](https://github.com/evgeniy-r))

### ⚙️ Changed
- Move `rnd_chars()` from `token` to `utils` [#79](https://github.com/datanymizer/datanymizer/pull/79)
([@evgeniy-r](https://github.com/evgeniy-r))
- Change transformer initialization (`set_defaults` -> `init`) [#76](https://github.com/datanymizer/datanymizer/pull/76)
([@evgeniy-r](https://github.com/evgeniy-r))

Expand Down
4 changes: 2 additions & 2 deletions datanymizer_dumper/src/lib.rs
Expand Up @@ -85,7 +85,7 @@ pub trait SchemaInspector: 'static + Sized + Send + Clone {
if let Ok(tables) = self.get_tables(connection) {
for table in tables.iter() {
let deps: Vec<Self::Table> = self
.get_dependencies(connection, &table)
.get_dependencies(connection, table)
.unwrap_or_default()
.into_iter()
.collect();
Expand All @@ -94,7 +94,7 @@ pub trait SchemaInspector: 'static + Sized + Send + Clone {

for table in tables.iter() {
let _ = res.entry(table.clone()).or_insert(0);
if let Ok(nodes) = depgraph.dependencies_of(&table) {
if let Ok(nodes) = depgraph.dependencies_of(table) {
for node in nodes.flatten() {
let counter = res.entry(node.clone()).or_insert(0);
*counter += 1;
Expand Down
2 changes: 1 addition & 1 deletion datanymizer_dumper/src/postgres/dumper.rs
Expand Up @@ -118,7 +118,7 @@ impl PgDumper {
self.write_log(format!("Dump table: {}", &table.get_full_name()))?;

self.dump_writer.write_all(b"\n")?;
self.dump_writer.write_all(&table.query_from().as_bytes())?;
self.dump_writer.write_all(table.query_from().as_bytes())?;
self.dump_writer.write_all(b"\n")?;

let cfg = settings.get_table(table.get_name().as_str());
Expand Down
12 changes: 6 additions & 6 deletions datanymizer_dumper/src/postgres/schema_inspector.rs
Expand Up @@ -101,16 +101,16 @@ impl SchemaInspector for PgSchemaInspector {
connection: &mut <Self::Dumper as Dumper>::Connection,
table: &Self::Table,
) -> Result<Vec<Self::Table>> {
let fkeys: Vec<ForeignKey> = connection
let fkeys_iterator = connection
.query(TABLE_FOREIGN_KEYS, &[&table.get_name()])?
.into_iter()
.map(|row| row.into())
.collect();
.map(|row| row.into());

let tables: Vec<Self::Table> = fkeys
.into_iter()
let tables: Vec<Self::Table> = fkeys_iterator
// Table from foreign key
.map(|fkey| PgTable::new(fkey.foreign_table_name, Some(fkey.foreign_table_schema)))
.map(|fkey: ForeignKey| {
PgTable::new(fkey.foreign_table_name, Some(fkey.foreign_table_schema))
})
// Columns for table
.map(|mut table| {
if let Ok(columns) = self.get_columns(connection, &table) {
Expand Down
1 change: 1 addition & 0 deletions datanymizer_engine/src/lib.rs
Expand Up @@ -6,6 +6,7 @@ pub(crate) mod store;
mod transformer;
pub mod transformers;
pub(crate) mod uniq_collector;
mod utils;
mod value;

pub use engine::Engine;
Expand Down
2 changes: 1 addition & 1 deletion datanymizer_engine/src/transformer/uniq_transformer.rs
Expand Up @@ -18,7 +18,7 @@ pub trait UniqTransformer {
let mut count = self.try_count();
while count > 0 {
let val = self.do_transform(field_name, field_value, ctx);
if uniq_collector::add_to_collector(&field_name, &val) {
if uniq_collector::add_to_collector(field_name, &val) {
return Some(val);
} else {
count -= 1;
Expand Down
7 changes: 5 additions & 2 deletions datanymizer_engine/src/transformers/token/base64.rs
@@ -1,4 +1,7 @@
use crate::transformer::{TransformContext, TransformResult, TransformResultHelper, Transformer};
use crate::{
transformer::{TransformContext, TransformResult, TransformResultHelper, Transformer},
utils,
};
use serde::{Deserialize, Serialize};

const DEFAULT_LENGTH: usize = 32;
Expand Down Expand Up @@ -65,7 +68,7 @@ impl Transformer for Base64TokenTransformer {
};
TransformResult::present(format!(
"{}{}",
super::rnd_chars(self.len - self.pad, &CHARS),
utils::rnd_chars(self.len - self.pad, &CHARS),
padding
))
}
Expand Down
7 changes: 5 additions & 2 deletions datanymizer_engine/src/transformers/token/base64url.rs
@@ -1,4 +1,7 @@
use crate::transformer::{TransformContext, TransformResult, TransformResultHelper, Transformer};
use crate::{
transformer::{TransformContext, TransformResult, TransformResultHelper, Transformer},
utils,
};
use serde::{Deserialize, Serialize};

const DEFAULT_LENGTH: usize = 32;
Expand Down Expand Up @@ -65,7 +68,7 @@ impl Transformer for Base64UrlTokenTransformer {
};
TransformResult::present(format!(
"{}{}",
super::rnd_chars(self.len - self.pad, &CHARS),
utils::rnd_chars(self.len - self.pad, &CHARS),
padding
))
}
Expand Down
7 changes: 5 additions & 2 deletions datanymizer_engine/src/transformers/token/hex.rs
@@ -1,4 +1,7 @@
use crate::transformer::{TransformContext, TransformResult, TransformResultHelper, Transformer};
use crate::{
transformer::{TransformContext, TransformResult, TransformResultHelper, Transformer},
utils,
};
use serde::{Deserialize, Serialize};

const DEFAULT_LENGTH: usize = 32;
Expand Down Expand Up @@ -49,7 +52,7 @@ impl Transformer for HexTokenTransformer {
_field_value: &str,
_ctx: &Option<TransformContext>,
) -> TransformResult {
TransformResult::present(super::rnd_chars(self.len, &CHARS))
TransformResult::present(utils::rnd_chars(self.len, &CHARS))
}
}

Expand Down
43 changes: 0 additions & 43 deletions datanymizer_engine/src/transformers/token/mod.rs
@@ -1,5 +1,3 @@
use rand::distributions::{Distribution, Uniform};

mod base64;
pub use base64::Base64TokenTransformer;

Expand All @@ -8,44 +6,3 @@ pub use base64url::Base64UrlTokenTransformer;

mod hex;
pub use hex::HexTokenTransformer;

/// Builds a random string of `len` characters, each picked uniformly
/// at random from the `src` slice.
fn rnd_chars(len: usize, src: &[char]) -> String {
    // Uniform distribution over valid indices into `src`.
    let index_dist = Uniform::<usize>::from(0..src.len());
    let mut rng = rand::thread_rng();
    (0..len)
        .map(|_| src[index_dist.sample(&mut rng)])
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn same_char() {
        // With a single-character alphabet the output is fully determined.
        let chars = ['a'];

        assert_eq!(rnd_chars(0, &chars), "");
        assert_eq!(rnd_chars(1, &chars), "a");
        assert_eq!(rnd_chars(4, &chars), "aaaa");
    }

    #[test]
    fn different_chars() {
        let chars = ['a', 'b', 'c'];

        assert_eq!(rnd_chars(0, &chars), "");

        // Every generated character must come from the source alphabet.
        let generated = rnd_chars(1, &chars);
        assert!(generated.chars().all(|ch| chars.contains(&ch)));

        let generated = rnd_chars(5, &chars);
        assert!(generated.chars().all(|ch| chars.contains(&ch)));
    }
}
42 changes: 42 additions & 0 deletions datanymizer_engine/src/utils.rs
@@ -0,0 +1,42 @@
use rand::distributions::{Distribution, Uniform};

/// Returns a random string of `len` characters, each drawn uniformly
/// at random from the `src` slice.
///
/// Returns an empty string when `len` is zero without touching `src`.
///
/// # Panics
///
/// Panics (with an explicit message) if `src` is empty while `len > 0`,
/// since no character can be sampled from an empty alphabet. Without the
/// guard, `Uniform::from(0..0)` would panic with an opaque range error —
/// even for `len == 0`.
pub fn rnd_chars(len: usize, src: &[char]) -> String {
    if len == 0 {
        return String::new();
    }
    assert!(
        !src.is_empty(),
        "rnd_chars: `src` must not be empty when `len` > 0"
    );

    let rng = rand::thread_rng();
    // Sample indices (not chars) so the distribution stays uniform for
    // any non-empty character set.
    let distribution = Uniform::<usize>::from(0..src.len());
    distribution
        .sample_iter(rng)
        .take(len)
        .map(|i| src[i])
        .collect::<String>()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn same_char() {
        // A one-character alphabet makes the result deterministic.
        let chars = ['a'];

        assert_eq!(rnd_chars(0, &chars), "");
        assert_eq!(rnd_chars(1, &chars), "a");
        assert_eq!(rnd_chars(4, &chars), "aaaa");
    }

    #[test]
    fn different_chars() {
        let chars = ['a', 'b', 'c'];

        assert_eq!(rnd_chars(0, &chars), "");

        // Whatever is generated must consist solely of alphabet members.
        for len in [1, 5] {
            let generated = rnd_chars(len, &chars);
            assert!(generated.chars().all(|ch| chars.contains(&ch)));
        }
    }
}

0 comments on commit 24eb27b

Please sign in to comment.