Commit: adding tag
HedayatAbedijoo committed Dec 18, 2020
1 parent 2ef6eb9 commit 0372cd9
Showing 13 changed files with 251 additions and 72 deletions.
Binary file modified dna/peershare.dna.gz
Binary file not shown.
5 changes: 4 additions & 1 deletion dna/tests/package.json
@@ -4,7 +4,10 @@
"description": "",
"main": "index.js",
"scripts": {
"test": "TRYORAMA_LOG_LEVEL=info RUST_LOG=error RUST_BACKTRACE=1 TRYORAMA_HOLOCHAIN_PATH=\"holochain\" ts-node src/index.ts"
"test": "npm run test:setup && npm run test:execute",
"test:setup": "CARGO_TARGET_DIR=../target cargo build --release --target wasm32-unknown-unknown && dna-util -c ../peershare.dna.workdir",
"test:execute": "TRYORAMA_LOG_LEVEL=info RUST_LOG=error RUST_BACKTRACE=1 TRYORAMA_HOLOCHAIN_PATH=\"holochain\" ts-node src/index.ts"

},
"author": "",
"license": "ISC",
66 changes: 60 additions & 6 deletions dna/tests/src/index.ts
@@ -21,7 +21,7 @@ const conductorHapps: InstallAgentsHapps = [
];

const orchestrator = new Orchestrator();

/*
orchestrator.registerScenario(
"Scenario: Tag Validation",
async (s: ScenarioApi, t) => {
@@ -72,15 +72,69 @@ orchestrator.registerScenario(
t.ok(test1);
t.deepEqual(test1.result, false);
_log(test1, "tag_file_result");
}
);
*/
orchestrator.registerScenario(
"Scenario: Test Upload and Tag",
async (s: ScenarioApi, t) => {
const [alice, bob] = await s.players([conductorConfig, conductorConfig]);
const [alice_test_happ] = await alice.installAgentsHapps(conductorHapps);
const Tag_ZOME_NAME = "tags";
const FileStorage_Zome_Name = "file_storage";
const PeerShare_Zome_Name = "peershare";
const conductor = alice_test_happ[0].cells[0];

/// All valid Tags.
test1 = await alice_test_happ[0].cells[0].call(Tag_ZOME_NAME, "tag_file", {
file_hash: Dummy_Hash_File,
tags: ["qwe", "jfhgnburtg", "FGRFS"],
const fileMetadata = dummy_file_metadata();
let fileHash = await conductor.call(
FileStorage_Zome_Name,
"create_file_metadata",
fileMetadata
);
t.ok(fileHash);
_log(fileHash, "file created");

/// tag the newly created metadata file
let test1 = await conductor.call(Tag_ZOME_NAME, "tag_file", {
file_hash: fileHash,
tags: ["movie"],
});
t.ok(test1);
t.deepEqual(test1.result, true);
_log(test1, "tag_file_result");
_log(test1, "file_tagged");

///// Test that the link to my address works.
let myfilesResult = await conductor.call(
Tag_ZOME_NAME,
"get_my_files",
null
);

t.ok(myfilesResult);
t.deepEqual(myfilesResult.list, 1);
_log(myfilesResult, "all my files");
}
);

orchestrator.registerScenario(
"Scenario: Test new extension function to file_storage module",
async (s: ScenarioApi, t) => {
const [alice, bob] = await s.players([conductorConfig, conductorConfig]);
const [alice_test_happ] = await alice.installAgentsHapps(conductorHapps);
const Tag_ZOME_NAME = "tags";
const FileStorage_Zome_Name = "file_storage";
const PeerShare_Zome_Name = "peershare";
const conductor = alice_test_happ[0].cells[0];

/////
let file_stortage_test = await conductor.call(
FileStorage_Zome_Name,
"new_extention_function",
null
);

t.ok(file_stortage_test);
_log(file_stortage_test, "test file storage new function");
}
);

18 changes: 18 additions & 0 deletions dna/zomes/file_storage/src/lib.rs
@@ -1 +1,19 @@
use hdk3::prelude::*;

extern crate file_storage;

// TODO1: the link from my_address to a newly uploaded file ("all_my_files") should be created here,
// e.g. from a post_commit callback.

/// TODO2: since the "all_my_files" link is created in this zome, the get_all_my_files() zome function should be implemented here as well.

/// Example: adding extra functionality to the external module
#[derive(Serialize, Deserialize, SerializedBytes)]
pub struct FilesResult {
pub list: usize,
}

#[hdk_extern]
fn new_extention_function(_: ()) -> ExternResult<FilesResult> {
Ok(FilesResult { list: 123 })
}
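
Not part of this commit: a minimal sketch of what TODO1 above could look like, assuming this version of hdk3 exposes create_link(base, target, tag); the function name and the plain-byte "all_my_files" LinkTag are illustrative only and would need to match whatever tag the tags zome actually writes.

use hdk3::prelude::*;

pub fn link_file_to_my_address(file_hash: EntryHash) -> ExternResult<HeaderHash> {
    // Base the link on the agent's own address, mirroring the commented-out
    // my_address() helper in dna/zomes/peershare/src/lib.rs.
    let agent_info = agent_info()?;
    let agent_address: AnyDhtHash = agent_info.agent_initial_pubkey.into();
    let my_address: EntryHash = agent_address.into();
    // Link my_address -> file_hash under the "all_my_files" tag from TODO1.
    create_link(my_address, file_hash, LinkTag::new("all_my_files"))
}

Called right after create_file_metadata succeeds (or from a post_commit callback once that is wired up), this would let get_my_files-style queries find every file the agent has uploaded.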
2 changes: 1 addition & 1 deletion dna/zomes/peershare/Cargo.toml
@@ -14,4 +14,4 @@ derive_more = "0"
serde = "1"

hc_utils = "0"
hdk3 = "0"
hdk3 = "0"
38 changes: 36 additions & 2 deletions dna/zomes/peershare/src/lib.rs
@@ -1,6 +1,5 @@
use hdk3::prelude::*;

// mod search;
mod search;

pub fn error<T>(reason: &str) -> ExternResult<T> {
Err(HdkError::Wasm(WasmError::Zome(String::from(reason))))
@@ -18,3 +17,38 @@ pub fn error<T>(reason: &str) -> ExternResult<T> {
// filter_boundry_in_min:input.filter_boundry_in_min // the end point of filtering, by passing via this variable.
// })
// }

// let links = get_links(env.clone(), base_address, zome_name, link_tag).await;
// let links = links
// .into_inner()
// .into_iter()
// .map(|h| h.target.try_into().unwrap())
// .collect::<Vec<EntryHash>>();

// #[hdk_extern]
// pub fn get_my_files(_:())
//pub fn someting() -> FileMetaData {}

// #[derive(Serialize, Deserialize, SerializedBytes)]
// pub struct FilesResult {
// pub list: usize,
// }
// #[hdk_extern]
// fn get_my_files2(_: ()) -> ExternResult<FilesResult> {
// let _linktag = link_tag("all_my_files").unwrap();
// let links = get_links(my_address(), None)?.into_inner().len();
// Ok(FilesResult { list: links })
// }

// #[derive(Serialize, Deserialize, SerializedBytes)]
// struct StringLinkTag(String);
// pub fn link_tag(tag: &str) -> ExternResult<LinkTag> {
// let sb: SerializedBytes = StringLinkTag(tag.into()).try_into()?;
// Ok(LinkTag(sb.bytes().clone()))
// }

// fn my_address() -> EntryHash {
// let agent_info = agent_info().unwrap();
// let agent_address: AnyDhtHash = agent_info.agent_initial_pubkey.into();
// agent_address.into()
// }
37 changes: 20 additions & 17 deletions dna/zomes/peershare/src/search/entry.rs
@@ -1,26 +1,29 @@
//TODO: finish this function
use hdk3::prelude::*;
#[derive(Serialize, Deserialize, SerializedBytes)]
struct SearchInput{
tags:Vec<String>,
from_timestamp:i64, // search from this time-stamp
filter_boundry_in_min: i64 // filter by last X minutes. UI show Year,Month,Day,Hour, Min and calculate the minutes before calling zome
struct SearchInput {
tags: Vec<String>,
from_timestamp: i64, // search from this time-stamp
filter_boundry_in_min: i64, // filter by the last X minutes; the UI shows Year, Month, Day, Hour, Min and calculates the minutes before calling the zome
}

#[derive(Serialize, Deserialize, SerializedBytes)]
struct SearchResult{
result:Vec<FileInfo>,
from_timestamp: i64,
filter_boundry_in_min:i64,
msg:String,
status:bool
struct SearchResult {
result: Vec<FileInfo>,
from_timestamp: i64,
filter_boundry_in_min: i64,
msg: String,
status: bool,
}

#[derive(Serialize, Deserialize, SerializedBytes)]
struct FileInfo{
hash:EntryHash,
file_name:String,
file_size:u32,
owner:String
struct FileInfo {
hash: EntryHash,
file_name: String,
file_size: u32,
owner: String,
}


#[derive(Serialize, Deserialize, SerializedBytes)]
pub struct MyFilesResult {
pub list: Vec<EntryHash>,
}
67 changes: 30 additions & 37 deletions dna/zomes/peershare/src/search/handlers.rs
@@ -1,52 +1,45 @@
const MAX_RESULT_IN_PAGE: i32 = 50; /// approximate records in each page. We should query DHT in a loop whether we reach the filter_boundry or mx number in each page(scroll)
const MAX_RESULT_IN_PAGE: i32 = 50;
/// Approximate number of records per page. We should query the DHT in a loop until we reach the filter_boundry or the max number of records per page (scroll).
const STEP_IN_MIN: i32 = 1; // each page or step of the search is X minutes.

pub fn search_file_by_tags(tags:Vec<String>,current:u64, end_of_filter:u64) -> Result<(Vec<FileInfo>,u64)>{
pub fn search_file_by_tags(
tags: Vec<String>,
current: u64,
end_of_filter: u64,
) -> Result<(Vec<FileInfo>, u64)> {
let mut start_pointer = current.clone();

let mut start_pointer = current.clone();
/// current time: 14:52 create index: upload file: art.20201212.1452
/// art.20201212.1350
///art.20201212.1410
/// art.20201212.1423
//art.20201212.1452

/// 12.12.2020 art 75 minutes current time: 15:37

/// current time: 14:52 create index: upload file: art.20201212.1452
/// art.20201212.1350
///art.20201212.1410
/// art.20201212.1423
//art.20201212.1452

/// art.20201212.1537 load all if result<50
//// art.20201212.1536 load all if result<50
//// art.20201212.1535 load all if result>50

//current 1535 return reulst;

/// 12.12.2020 art 75 minutes current time: 15:37

/// art.20201212.1537 load all if result<50
//// art.20201212.1536 load all if result<50
//// art.20201212.1535 load all if result>50



//current 1535 return result;




/// 1- get all path art.20201212 ///what does it mean,
/// art.20201212 /// remail .

/// 1- possibilities:
//// 2 - slicing inside possibilities.
/// 3 - consition, based on result and time distance
/// 1- get all path art.20201212 ///what does it mean,
/// art.20201212 /// remail .

/// 1- possibilities:
//// 2 - slicing inside possibilities.
/// 3 - condition, based on result and time distance
// Start While: result.length <= MAX_RESULT_IN_PAGE || start_pointer <= end_of_filter

let result:Vec<String>;
let result: Vec<String>;
let path_date = "20201206".into(); // get YearMonthDay in this format YYYYMMDD based on the start_pointer
let path_time = "1312".into(); // get time in this format HHMM based on the start_pointer

for s in tags{
// Gnerate TAGS: (format!("{}.{}.{}",s,path_date,path_time));
// Query each Tag from DHT and push to result
for s in tags {
// Generate TAGS: (format!("{}.{}.{}",s,path_date,path_time));
// Query each Tag from DHT and push to result
}
start_pointer=STEP_IN_MIN;
start_pointer = STEP_IN_MIN;

// END While
Ok((result,start_pointer))

}
Ok((result, start_pointer))
}
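
Not part of this commit: the commented pseudocode above describes a time-bucketed tag index with keys like art.20201212.1452, walked backwards one STEP_IN_MIN at a time until the filter boundary (or MAX_RESULT_IN_PAGE results) is reached. Below is a self-contained sketch of how those bucket keys might be generated, assuming chrono for date formatting, a unix-seconds from_timestamp, and the tag.YYYYMMDD.HHMM key format hinted at in the comments; all names are illustrative.

use chrono::{Duration, NaiveDateTime};

const STEP_IN_MIN: i64 = 1; // walk backwards one minute per step

/// Build the "tag.YYYYMMDD.HHMM" bucket keys for every tag, from `from_timestamp`
/// (unix seconds) back to the filter boundary.
fn bucket_keys(tags: &[String], from_timestamp: i64, filter_boundry_in_min: i64) -> Vec<String> {
    let mut keys = Vec::new();
    let mut cursor = NaiveDateTime::from_timestamp(from_timestamp, 0);
    let end = cursor - Duration::minutes(filter_boundry_in_min);
    while cursor >= end {
        let path_date = cursor.format("%Y%m%d").to_string(); // YYYYMMDD
        let path_time = cursor.format("%H%M").to_string();   // HHMM
        for tag in tags {
            // e.g. "art.20201212.1452"
            keys.push(format!("{}.{}.{}", tag, path_date, path_time));
        }
        // In the real zome the loop would also stop once MAX_RESULT_IN_PAGE
        // results have been fetched for the current page.
        cursor = cursor - Duration::minutes(STEP_IN_MIN);
    }
    keys
}

Each key would then be looked up on the DHT (e.g. as a link tag or path) and the hits accumulated into the page's result set.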
4 changes: 2 additions & 2 deletions dna/zomes/peershare/src/search/mod.rs
@@ -1,3 +1,3 @@
pub mod entry;
pub mod handlers;
pub mod validation;
//pub mod handlers;
//pub mod validation;
16 changes: 13 additions & 3 deletions dna/zomes/tags/src/lib.rs
@@ -1,9 +1,7 @@
//use hdk3::prelude::*;
//use hc_utils::WrappedEntryHash;
use crate::tag::entry::{CreateTagInput, TagResult};
use hdk3::prelude::*;
mod tag;

//use hc_utils::WrappedEntryHash;
#[hdk_extern]
pub fn get_agent_pubkey(_: ()) -> ExternResult<AgentPubKey> {
let agent_info = agent_info()?;
@@ -14,3 +12,15 @@ pub fn get_agent_pubkey(_: ()) -> ExternResult<AgentPubKey> {
pub fn tag_file(input: CreateTagInput) -> ExternResult<TagResult> {
return Ok(tag::handlers::create_tags(input));
}

#[derive(Serialize, Deserialize, SerializedBytes)]
pub struct MyFilesResult {
pub list: usize,
}

#[hdk_extern]
fn get_my_files(_: ()) -> ExternResult<MyFilesResult> {
let _linktag = tag::handlers::link_tag(tag::handlers::ALL_MY_FILES).unwrap();
let links = get_links(tag::handlers::my_address(), None)?.into_inner();
Ok(MyFilesResult { list: links.len() })
}
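
get_my_files above calls tag::handlers::link_tag, tag::handlers::my_address and tag::handlers::ALL_MY_FILES, none of which appear in this diff. Judging from the commented-out helpers left in dna/zomes/peershare/src/lib.rs, they presumably look roughly like the sketch below; the value of ALL_MY_FILES is an assumption.

use hdk3::prelude::*;

pub const ALL_MY_FILES: &str = "all_my_files"; // assumed value, not shown in the diff

#[derive(Serialize, Deserialize, SerializedBytes)]
struct StringLinkTag(String);

pub fn link_tag(tag: &str) -> ExternResult<LinkTag> {
    // Serialize the tag string and use its bytes as the LinkTag.
    let sb: SerializedBytes = StringLinkTag(tag.into()).try_into()?;
    Ok(LinkTag(sb.bytes().clone()))
}

pub fn my_address() -> EntryHash {
    // Use the agent's public key as the base address that "all_my_files" links hang off.
    let agent_info = agent_info().unwrap();
    let agent_address: AnyDhtHash = agent_info.agent_initial_pubkey.into();
    agent_address.into()
}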
