Fixes #15253: Sending & Receiving files with the shared-files API
bernsteining committed Jul 30, 2019
1 parent 1499931 commit 1d936f1
Showing 5 changed files with 173 additions and 60 deletions.
17 changes: 17 additions & 0 deletions relay/sources/relayd/Cargo.lock

Generated lockfile; diff not rendered.

2 changes: 2 additions & 0 deletions relay/sources/relayd/Cargo.toml
@@ -51,6 +51,8 @@ itertools = "0.8"
md-5 = "0.8"
hyper = "0.12"
sha2 = "0.8"
bytebuffer = "0.2.1"
bytes = "0.4.12"

[dev-dependencies]
criterion = "0.2"
39 changes: 29 additions & 10 deletions relay/sources/relayd/src/api.rs
@@ -31,17 +31,21 @@
use crate::{
error::Error,
remote_run::{RemoteRun, RemoteRunTarget},
shared_files::{metadata_hash_checker, metadata_writer, parse_hash_from_raw, Metadata},
shared_files::{
file_writer, metadata_hash_checker, metadata_parser, metadata_writer,
parse_parameter_from_raw, parse_ttl,
},
{stats::Stats, status::Status, JobConfig},
};

use futures::Future;
use std::{
collections::HashMap,
net::SocketAddr,
sync::{Arc, RwLock},
};
use tracing::info;
use warp::{body, filters, filters::method::v2::*, path, reject::custom, reply, Filter};
use warp::{body, filters, filters::method::v2::*, path, reject::custom, reply, Buf, Filter};

pub fn api(
listen: SocketAddr,
@@ -111,21 +115,36 @@ pub fn api(
},
);

let shared_files_put = put().and(path::peek()).and(body::form()).map(
move |peek: filters::path::Peek, simple_map: HashMap<String, String>| {
let metadata = Metadata::new(simple_map);
metadata_writer(format!("{}", metadata.unwrap()), peek.as_str());
reply()
},
);
let shared_files_put = put()
.and(filters::query::raw())
.and(path::peek())
.and(warp::body::concat())
.map(
move |ttl: String, peek: filters::path::Peek, mut buf: warp::body::FullBody| {
metadata_writer(
format!(
"{}\nexpires={}",
metadata_parser(buf.by_ref()).join("\n"),
parse_ttl(parse_parameter_from_raw(ttl)).unwrap()
),
peek.as_str(),
);

file_writer(buf.by_ref(), peek.as_str());
reply()
},
);

let shared_files_head = head()
.and(path::peek())
.and(filters::query::raw()) // retrieve the ?hash=file-hash parameter
.map(|peek: filters::path::Peek, raw: String| {
reply::with_status(
"".to_string(),
metadata_hash_checker(format!("./{}", peek.as_str()), parse_hash_from_raw(raw)),
metadata_hash_checker(
format!("./{}", peek.as_str()),
parse_parameter_from_raw(raw),
),
)
});
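
For illustration, a hedged client-side sketch of the two routes wired above, in the same reqwest client style as the send_file helper added further down in this commit; the relay URL, node UUIDs, file name and hash value are placeholders.

// Hypothetical helper, for illustration only.
fn upload_then_check(client: &reqwest::Client) -> Result<(), reqwest::Error> {
    // PUT the shared file, passing its time-to-live as the raw query string.
    let file = std::fs::File::open("target/tmp/test_send_file.txt").unwrap();
    client
        .put("https://relay/rudder/relay-api/shared-files/target-uuid/source-uuid/file.txt?ttl=1d")
        .body(file)
        .send()?;

    // HEAD asks whether the stored metadata still matches the given hash;
    // metadata_hash_checker answers through the status code (success vs 404).
    let response = client
        .head("https://relay/rudder/relay-api/shared-files/target-uuid/source-uuid/file.txt?hash=a75fda39a7af33eb93ab1c74874dcf66d5761ad30977368cf0c4788cf5bfd34f")
        .send()?;
    println!("hash up to date: {}", response.status().is_success());
    Ok(())
}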

2 changes: 2 additions & 0 deletions relay/sources/relayd/src/error.rs
@@ -75,6 +75,7 @@ pub enum Error {
MissingTargetNodes,
InvalidHashType,
InvalidLogFilter(tracing_fmt::filter::env::ParseError),
InvalidHeader,
}

impl Display for Error {
@@ -116,6 +117,7 @@ impl Display for Error {
"Invalid hash type provided, available hash types : sha256, sha512"
),
InvalidLogFilter(ref err) => write!(f, "Log filter is invalid: {}", err),
InvalidHeader => write!(f, "Invalid header"),
}
}
}
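
A one-line illustration of the new variant rendered through the existing Display impl:

assert_eq!(Error::InvalidHeader.to_string(), "Invalid header");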
173 changes: 123 additions & 50 deletions relay/sources/relayd/src/shared_files.rs
@@ -39,12 +39,15 @@ use openssl::pkey::Public;
use openssl::rsa::Rsa;
use openssl::sign::Verifier;
use regex::Regex;
use reqwest;
use sha2::{Digest, Sha256, Sha512};
use std::collections::HashMap;
use std::fmt;
use std::fs;
use std::io::BufRead;
use std::io::Read;
use std::path::Path;
use std::str::FromStr;
use warp::Buf;

pub enum HashType {
Sha256,
@@ -89,49 +92,15 @@

#[derive(Debug)]
pub struct Metadata {
header: String,
algorithm: String,
digest: String,
hash_value: String,
short_pubkey: String,
hostname: String,
keydate: String,
keyid: String,
expires: String,
}

impl FromStr for Metadata {
type Err = Error;

fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Metadata {
header: parse_value("header", s).unwrap(),
algorithm: parse_value("algorithm", s).unwrap(),
digest: parse_value("digest", s).unwrap(),
hash_value: parse_value("hash_value", s).unwrap(),
short_pubkey: parse_value("short_pubkey", s).unwrap(),
hostname: parse_value("hostname", s).unwrap(),
keydate: parse_value("keydate", s).unwrap(),
keyid: parse_value("keyid", s).unwrap(),
expires: parse_value("expires", s).unwrap(),
})
}
}

impl Metadata {
pub fn new(hashmap: HashMap<String, String>) -> Result<Self, Error> {
Ok(Metadata {
header: hashmap.get("header").unwrap().to_string(),
algorithm: hashmap.get("algorithm").unwrap().to_string(),
digest: hashmap.get("digest").unwrap().to_string(),
hash_value: hashmap.get("hash_value").unwrap().to_string(),
short_pubkey: hashmap.get("short_pubkey").unwrap().to_string(),
hostname: hashmap.get("hostname").unwrap().to_string(),
keydate: hashmap.get("keydate").unwrap().to_string(),
keyid: hashmap.get("keyid").unwrap().to_string(),
expires: hashmap.get("expires").unwrap().to_string(),
})
}
pub header: String,
pub algorithm: String,
pub digest: String,
pub hash_value: String,
pub short_pubkey: String,
pub hostname: String,
pub keydate: String,
pub keyid: String,
pub expires: String,
}

impl fmt::Display for Metadata {
@@ -154,6 +123,27 @@ impl fmt::Display for Metadata {
}
}

impl FromStr for Metadata {
type Err = Error;

fn from_str(s: &str) -> Result<Self, Self::Err> {
if parse_value("header", s).unwrap() != "rudder-signature-v1" {
return Err(Error::InvalidHeader);
}
Ok(Metadata {
header: "rudder-signature-v1".to_string(),
algorithm: parse_value("algorithm", s).unwrap(),
digest: parse_value("digest", s).unwrap(),
hash_value: parse_value("hash_value", s).unwrap(),
short_pubkey: parse_value("short_pubkey", s).unwrap(),
hostname: parse_value("hostname", s).unwrap(),
keydate: parse_value("keydate", s).unwrap(),
keyid: parse_value("keyid", s).unwrap(),
expires: parse_value("expires", s).unwrap(),
})
}
}
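
A minimal sketch of the parsing path above; the metadata values are hypothetical (taken from the test added later in this commit), and any header other than rudder-signature-v1 is rejected with the new InvalidHeader error:

let raw = "header=rudder-signature-v1\nalgorithm=sha256\ndigest=8ca9efc5752e133e2e80e2661c176fa50f\nhash_value=a75fda39a7af33eb93ab1c74874dcf66d5761ad30977368cf0c4788cf5bfd34f\nshort_pubkey=shortpubkey\nhostname=ubuntu-18-04-64\nkeydate=2018-10-3118:21:43.653257143\nkeyid=B29D02BB\nexpires=86400\n";
let parsed: Metadata = raw.parse().unwrap();
assert_eq!(parsed.hostname, "ubuntu-18-04-64");
assert!("header=not-a-rudder-signature\n".parse::<Metadata>().is_err());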

pub fn parse_value(key: &str, file: &str) -> Result<String, ()> {
let regex_key = Regex::new(&format!(r"{}=(?P<key>[^\n]+)\n", key)).unwrap();

@@ -198,7 +188,7 @@ pub fn validate_signature(
}

pub fn parse_ttl(ttl: String) -> Result<i64, Error> {
let regex_numbers = Regex::new(r"^(?:(?P<days>\d+)(?:d|days))?\s*(?:(?P<hours>\d+)(?:h|hours))?\s*(?:(?P<minutes>\d+)(?:m|minutes))?\s*(?:(?P<seconds>\d+)(?:s|seconds))?").unwrap();
let regex_numbers = Regex::new(r"^(?:(?P<days>\d+)(?:d|days|day))?\s*(?:(?P<hours>\d+)(?:h|hours|hour))?\s*(?:(?P<minutes>\d+)(?:m|minutes|minute))?\s*(?:(?P<seconds>\d+)(?:s|seconds|second))?").unwrap();

fn parse_time<'t>(cap: &regex::Captures<'t>, n: &str) -> Result<i64, Error> {
Ok(match cap.name(n) {
@@ -225,12 +215,38 @@ pub fn parse_ttl(ttl: String) -> Result<i64, Error> {
}
}
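
A few hedged usage examples for the widened TTL syntax; only success is asserted here, since the numeric result is computed in the collapsed body above:

assert!(parse_ttl("1d 1h".to_string()).is_ok());
assert!(parse_ttl("1day 1hour".to_string()).is_ok());
assert!(parse_ttl("30m".to_string()).is_ok());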

pub fn metadata_parser(buf: &mut warp::body::FullBody) -> Vec<String> {
let mut metadata = Vec::new();

let reader = buf.reader();

for line in reader.lines() {
let mytmpstr = line.unwrap();
if mytmpstr != "" {
metadata.push(mytmpstr);
} else {
break;
}
}
metadata
}

pub fn file_writer(buf: &mut warp::body::FullBody, path: &str) {
let mut myvec: Vec<u8> = vec![];

buf.by_ref().reader().consume(1); // skip the line feed
buf.reader().read_to_end(&mut myvec).unwrap();

fs::write(format!("shared-files/{}", path), myvec).expect("Unable to write file");
}
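
Taken together, the two helpers above split the PUT body in two passes; a hedged sketch of the hand-off, with a hypothetical example body:

// Given a body starting with
//     "header=rudder-signature-v1\nalgorithm=sha256\n\n...",
// metadata_parser returns vec!["header=rudder-signature-v1", "algorithm=sha256"]
// and stops right after the empty line; file_writer then skips one more line feed
// and writes whatever remains of the body to shared-files/<path>.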

pub fn metadata_writer(metadata_string: String, peek: &str) {
let myvec: Vec<String> = peek.split('/').map(|s| s.to_string()).collect();
let (target_uuid, source_uuid, _file_id) = (&myvec[0], &myvec[1], &myvec[2]);
let _ = fs::create_dir_all(format!("./{}/{}/", target_uuid, source_uuid)); // on cree les folders s'ils existent pas
//fs::create_dir_all(format!("/var/rudder/configuration-repository/shared-files/{}/{}/", target_uuid, source_uuid)); // real path
fs::write(format!("./{}", peek), metadata_string).expect("Unable to write file");
let _ = fs::create_dir_all(format!("shared-files/{}/{}/", target_uuid, source_uuid)); // on cree les folders s'ils existent pas
//fs::create_dir_all(format!("/var/rudder/configuration-repository/shared-files/{}/{}/", target_uuid, source_uuid)); // real path
fs::write(format!("shared-files/{}.metadata", peek), metadata_string)
.expect("Unable to write file");
}
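
For illustration, a call such as the following (hypothetical node UUIDs and file name) creates the per-node directories and stores the metadata next to the future file path:

metadata_writer("header=rudder-signature-v1\n".to_string(), "node1/node2/file.txt");
// -> creates shared-files/node1/node2/ and writes
//    shared-files/node1/node2/file.txt.metadata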

pub fn metadata_hash_checker(filename: String, hash: String) -> hyper::StatusCode {
Expand All @@ -255,13 +271,48 @@ pub fn metadata_hash_checker(filename: String, hash: String) -> hyper::StatusCod
StatusCode::from_u16(404).unwrap()
}

pub fn parse_hash_from_raw(raw: String) -> String {
pub fn parse_parameter_from_raw(raw: String) -> String {
raw.split('=')
.map(|s| s.to_string())
.filter(|s| s != "hash")
.filter(|s| s != "hash" && s != "ttl")
.collect::<String>()
}
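
Intended behaviour of the renamed helper, keeping only the value part of the raw query string (illustrative values):

assert_eq!(parse_parameter_from_raw("hash=abc123".to_string()), "abc123");
assert_eq!(parse_parameter_from_raw("ttl=1d".to_string()), "1d");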

#[cfg(test)]
mod tests {
use super::*;
use openssl::sign::Signer;

#[test]
pub fn it_writes_the_metadata() {
let metadata = Metadata {
header: "rudder-signature-v1".to_string(),
algorithm: "sha256".to_string(),
digest: "8ca9efc5752e133e2e80e2661c176fa50f".to_string(),
hash_value: "a75fda39a7af33eb93ab1c74874dcf66d5761ad30977368cf0c4788cf5bfd34f"
.to_string(),
short_pubkey: "shortpubkey".to_string(),
hostname: "ubuntu-18-04-64".to_string(),
keydate: "2018-10-3118:21:43.653257143".to_string(),
keyid: "B29D02BB".to_string(),
expires: "1d 1h".to_string(),
};

assert_eq!(format!("{}", metadata), format!("header=rudder-signature-v1\nalgorithm=sha256\ndigest=8ca9efc5752e133e2e80e2661c176fa50f\nhash_value=a75fda39a7af33eb93ab1c74874dcf66d5761ad30977368cf0c4788cf5bfd34f\nshort_pubkey=shortpubkey\nhostname=ubuntu-18-04-64\nkeydate=2018-10-3118:21:43.653257143\nkeyid=B29D02BB\nexpires={}\n", parse_ttl("1d 1h".to_string()).unwrap()));
}

pub fn send_file(file_id: String, source_uuid: String, target_uuid: String) {
let file_test = fs::File::open("target/tmp/test_send_file.txt").unwrap();
let client = reqwest::Client::new();
let _res = client
.put(&format!(
"https://relay/rudder/relay-api/shared-files/{}/{}/{}",
target_uuid, source_uuid, file_id
))
.body(file_test)
.send();
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down Expand Up @@ -319,4 +370,26 @@ z5VEb9yx2KikbWyChM1Akp82AV5BzqE80QIBIw==".to_string()).unwrap(),

assert!(validate_signature(data, keypub, HashType::Sha512, &signature).unwrap());
}

#[test]
pub fn it_validates_signatures() {
// Generate a keypair
let k0 = Rsa::generate(2048).unwrap();
let k0pkey = k0.public_key_to_pem().unwrap();
let k1 = Rsa::public_key_from_pem(&k0pkey).unwrap();

let keypriv = PKey::from_rsa(k0).unwrap();
let keypub = PKey::from_rsa(k1).unwrap();

let data = b"hello, world!";

// Sign the data
let mut signer = Signer::new(HashType::Sha512.to_openssl_hash(), &keypriv).unwrap();
signer.update(data).unwrap();

let signature = signer.sign_to_vec().unwrap();

assert!(validate_signature(data, keypub, HashType::Sha512, &signature).unwrap());
}

}
