diff --git a/Cargo.lock b/Cargo.lock index e73fc44ae..2d9bb857f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2657,7 +2657,7 @@ dependencies = [ [[package]] name = "holochain_scaffolding_cli" -version = "0.3000.0-dev.8" +version = "0.3000.0-dev.9" dependencies = [ "anyhow", "build-fs-tree", diff --git a/Cargo.toml b/Cargo.toml index 95eb44581..4290a0698 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] edition = "2021" name = "holochain_scaffolding_cli" -version = "0.3000.0-dev.8" +version = "0.3000.0-dev.9" description = "CLI to easily generate and modify holochain apps" license = "CAL-1.0" homepage = "https://developer.holochain.org" diff --git a/flake.lock b/flake.lock index 215bfd67a..09a32bdd1 100644 --- a/flake.lock +++ b/flake.lock @@ -173,11 +173,11 @@ ] }, "locked": { - "lastModified": 1715140843, - "narHash": "sha256-O8sNQcPkA0ohfz7qu2cisI/dNAler73l+4FK2ad32Ew=", + "lastModified": 1715841103, + "narHash": "sha256-U1LjCL+/hPaErM/6tFsHft1mOy0vDk1tUT//Y6/pO4c=", "owner": "holochain", "repo": "holochain", - "rev": "e772a89cddcb9e18187d24e06b704d26a5c6aa15", + "rev": "51f73098c1e561dc4d922423f2e1326378fbc44b", "type": "github" }, "original": { @@ -254,11 +254,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1714906307, - "narHash": "sha256-UlRZtrCnhPFSJlDQE7M0eyhgvuuHBTe1eJ9N9AQlJQ0=", + "lastModified": 1715534503, + "narHash": "sha256-5ZSVkFadZbFP1THataCaSf0JH2cAH3S29hU9rrxTEqk=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "25865a40d14b3f9cf19f19b924e2ab4069b09588", + "rev": "2057814051972fa1453ddfb0d98badbea9b83c06", "type": "github" }, "original": { @@ -332,11 +332,11 @@ ] }, "locked": { - "lastModified": 1715048276, - "narHash": "sha256-SqWSTvCjNBBnV/WIQdaxVi5V9H3VJ7cOJAxPQdR1TBY=", + "lastModified": 1715825775, + "narHash": "sha256-7np2/EEr5Xm8IuKWQ43q8AA1Lb6Us2BW6rYMxGrInIg=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "b037d65c988421b54024e62691eace4f2fe623bc", + "rev": "55f468b3d49c5d3321e85f2f9b1158476a2a90fb", "type": 
"github" }, "original": { @@ -348,11 +348,11 @@ "scaffolding": { "flake": false, "locked": { - "lastModified": 1714147981, - "narHash": "sha256-PIWouOg4W2jVFjsHexAqOCdnwy6gQgll7kWRHssiid0=", + "lastModified": 1715149489, + "narHash": "sha256-P9wgn574Cn5iAZNEAEh6+bnpCn4h+GrWxxad+3bmByM=", "owner": "holochain", "repo": "scaffolding", - "rev": "6582621e73b5127ed863b13f5648d9a333542623", + "rev": "bd044cd0819717f50c5efdb90c9ce5b1ecd6c0bc", "type": "github" }, "original": { @@ -386,11 +386,11 @@ }, "locked": { "dir": "versions/0_3_rc", - "lastModified": 1715140843, - "narHash": "sha256-O8sNQcPkA0ohfz7qu2cisI/dNAler73l+4FK2ad32Ew=", + "lastModified": 1715841103, + "narHash": "sha256-U1LjCL+/hPaErM/6tFsHft1mOy0vDk1tUT//Y6/pO4c=", "owner": "holochain", "repo": "holochain", - "rev": "e772a89cddcb9e18187d24e06b704d26a5c6aa15", + "rev": "51f73098c1e561dc4d922423f2e1326378fbc44b", "type": "github" }, "original": { diff --git a/src/cli.rs b/src/cli.rs index 1e77504c9..030829ada 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -9,8 +9,7 @@ use crate::scaffold::collection::{scaffold_collection, CollectionType}; use crate::scaffold::dna::{scaffold_dna, DnaFileTree}; use crate::scaffold::entry_type::crud::{parse_crud, Crud}; use crate::scaffold::entry_type::definitions::{ - parse_entry_type_reference, parse_referenceable, Cardinality, EntryTypeReference, - FieldDefinition, FieldType, Referenceable, + Cardinality, EntryTypeReference, FieldDefinition, FieldType, Referenceable, }; use crate::scaffold::entry_type::{fields::parse_fields, scaffold_entry_type}; use crate::scaffold::example::{choose_example, Example}; @@ -46,7 +45,7 @@ use structopt::StructOpt; pub struct HcScaffold { #[structopt(short, long)] /// The template to use for the scaffold command - /// Can either be an option from the built-in templates: "vanilla", "vue", "lit", "svelte" + /// Can either be an option from the built-in templates: "vanilla", "vue", "lit", "svelte", "headless" /// Or a path to a custom template 
template: Option<String>, @@ -160,11 +159,11 @@ pub enum HcScaffoldCommand { /// Name of the integrity zome in which you want to scaffold the link type zome: Option<String>, - #[structopt(parse(try_from_str = parse_referenceable))] + #[structopt(parse(try_from_str = Referenceable::from_str))] /// Entry type (or agent role) used as the base for the links from_referenceable: Option<Referenceable>, - #[structopt(parse(try_from_str = parse_referenceable))] + #[structopt(parse(try_from_str = Referenceable::from_str))] /// Entry type (or agent role) used as the target for the links to_referenceable: Option<Referenceable>, @@ -196,7 +195,7 @@ pub enum HcScaffoldCommand { /// Collection name, just to differentiate it from other collections collection_name: Option<String>, - #[structopt(parse(try_from_str = parse_entry_type_reference))] + #[structopt(parse(try_from_str = EntryTypeReference::from_str))] /// Entry type that is going to be added to the collection entry_type: Option<EntryTypeReference>, @@ -204,7 +203,6 @@ pub enum HcScaffoldCommand { /// Skips UI generation for this collection. no_ui: bool, }, - Example { /// Name of the example to scaffold. One of ['hello-world', 'forum']. example: Option<Example>, @@ -217,44 +215,37 @@ impl HcScaffold { pub async fn run(self) -> anyhow::Result<()> { let current_dir = std::env::current_dir()?; - let template_config = if let Some(t) = &self.template { - // Only read from config if the template is inbuilt and not a path - if Path::new(t).exists() { - None - } else { - get_template_config(&current_dir)? 
- } - } else { - None - }; + let template_config = get_template_config(&current_dir)?; let template = match (&template_config, &self.template) { (Some(config), Some(template)) if &config.template != template => { return Err(ScaffoldError::InvalidArguments(format!( - "The value {template} passed with `--template` does not match the template the web-app was scaffolded with: {}", - config.template + "The value {} passed with `--template` does not match the template the web-app was scaffolded with: {}", + template.italic(), + config.template.italic(), )).into()) } - (Some(config), _) => Some(&config.template), + // Only read from config if the template is inbuilt and not a path + (Some(config), _) if !Path::new(&config.template).exists() => Some(&config.template), (_, t) => t.as_ref(), }; + // Given a template either passed via the --template flag or retrieved via the hcScaffold config, + // get the template file tree and the ui framework name or custom template path let (template, template_file_tree) = match template { - Some(template) => { - let template_name_or_path; - let file_tree = match template.as_str() { - "lit" | "svelte" | "vanilla" | "vue" => { - let ui_framework = UiFramework::from_str(template)?; - template_name_or_path = ui_framework.to_string(); - ui_framework.template_filetree()? - } - custom_template_path => { - template_name_or_path = custom_template_path.to_string(); - let templates_dir = current_dir.join(PathBuf::from(custom_template_path)); - load_directory_into_memory(&templates_dir)? - } - }; - (template_name_or_path.to_owned(), file_tree) - } + Some(template) => match template.to_lowercase().as_str() { + "lit" | "svelte" | "vanilla" | "vue" | "headless" => { + let ui_framework = UiFramework::from_str(template)?; + (ui_framework.name(), ui_framework.template_filetree()?) 
+ } + custom_template_path if Path::new(custom_template_path).exists() => { + let templates_dir = current_dir.join(PathBuf::from(custom_template_path)); + ( + custom_template_path.to_string(), + load_directory_into_memory(&templates_dir)?, + ) + } + path => return Err(ScaffoldError::PathNotFound(PathBuf::from(path)).into()), + }, None => { let ui_framework = match self.command { HcScaffoldCommand::WebApp { .. } => UiFramework::choose()?, @@ -267,7 +258,7 @@ impl HcScaffold { UiFramework::try_from(&file_tree)? } }; - (ui_framework.to_string(), ui_framework.template_filetree()?) + (ui_framework.name(), ui_framework.template_filetree()?) } }; diff --git a/src/file_tree.rs b/src/file_tree.rs index b6236e7e6..58b4eca83 100644 --- a/src/file_tree.rs +++ b/src/file_tree.rs @@ -1,3 +1,4 @@ +use anyhow::Context; use build_fs_tree::{dir, file, Build, FileSystemTree, MergeableFileSystemTree}; use ignore::WalkBuilder; use include_dir::Dir; @@ -183,23 +184,24 @@ fn find_map_files_rec Option>( found_files } -pub fn map_rust_files ScaffoldResult + Clone>( +pub fn map_rust_files ScaffoldResult + Copy>( file_tree: &mut FileTree, map_fn: F, ) -> ScaffoldResult<()> { - map_all_files(file_tree, |file_path, s| { + map_all_files(file_tree, |file_path, contents| { if let Some(extension) = file_path.extension() { if extension == "rs" { - let rust_file: syn::File = syn::parse_str(s.as_str()).map_err(|e| { - ScaffoldError::MalformedFile(file_path.clone(), format!("{}", e)) - })?; - let new_file = map_fn(file_path, rust_file)?; - - return Ok(unparse(&new_file)); + let original_file: syn::File = syn::parse_str(&contents) + .map_err(|e| ScaffoldError::MalformedFile(file_path.clone(), e.to_string()))?; + let new_file = map_fn(file_path, original_file.clone())?; + // Only reformat the file via unparse if the contents of the newly modified + // file are different from the original + if new_file != original_file { + return Ok(unparse(&new_file)); + } } } - - Ok(s) + Ok(contents) }) } @@ -238,7 
+240,7 @@ pub fn unflatten_file_tree( Ok(file_tree) } -pub fn map_all_files ScaffoldResult + Clone>( +pub fn map_all_files ScaffoldResult + Copy>( file_tree: &mut FileTree, map_fn: F, ) -> ScaffoldResult<()> { @@ -246,24 +248,26 @@ pub fn map_all_files ScaffoldResult + Clone>( Ok(()) } -fn map_all_files_rec ScaffoldResult + Clone>( +fn map_all_files_rec ScaffoldResult + Copy>( file_tree: &mut FileTree, current_path: PathBuf, map_fn: F, ) -> ScaffoldResult<()> { - if let Some(c) = file_tree.dir_content_mut() { - for (key, mut tree) in c.clone().into_iter() { + if let Some(dir) = file_tree.dir_content_mut() { + for (key, mut tree) in dir.clone().into_iter() { let child_path = current_path.join(&key); - match tree.clone() { - FileTree::Directory(_dir_contents) => { - map_all_files_rec(&mut tree, child_path, map_fn.clone())?; + match &tree { + FileTree::Directory(_) => { + map_all_files_rec(&mut tree, child_path, map_fn)?; } FileTree::File(file_contents) => { - *tree.file_content_mut().unwrap() = map_fn(child_path, file_contents)?; + *tree + .file_content_mut() + .context("Failed to get mutable reference of file tree")? 
= + map_fn(child_path, file_contents.to_owned())?; } } - - c.insert(key.clone(), tree.clone()); + dir.insert(key, tree); } } diff --git a/src/reserved_words.rs b/src/reserved_words.rs index 51900b688..6a31b5fe2 100644 --- a/src/reserved_words.rs +++ b/src/reserved_words.rs @@ -1,5 +1,3 @@ -use convert_case::{Case, Casing}; - use crate::error::{ScaffoldError, ScaffoldResult}; const RESERVED_WORDS: [&str; 27] = [ @@ -32,16 +30,15 @@ const RESERVED_WORDS: [&str; 27] = [ "Call", ]; -// Returns an error if the given string is invalid due to it being a reserved word +/// Returns an error if the given string is invalid due to it being a reserved word pub fn check_for_reserved_words(string_to_check: &str) -> ScaffoldResult<()> { - for w in RESERVED_WORDS { - if string_to_check - .to_case(Case::Lower) - .eq(&w.to_string().to_case(Case::Lower)) - { - return Err(ScaffoldError::InvalidReservedWord(w.to_string())); - } + if RESERVED_WORDS + .iter() + .any(|w| string_to_check.eq_ignore_ascii_case(w)) + { + return Err(ScaffoldError::InvalidReservedWord( + string_to_check.to_string(), + )); } - Ok(()) } diff --git a/src/scaffold/collection/coordinator.rs b/src/scaffold/collection/coordinator.rs index ab0fd3006..28eec0796 100644 --- a/src/scaffold/collection/coordinator.rs +++ b/src/scaffold/collection/coordinator.rs @@ -231,6 +231,22 @@ fn add_delete_link_in_delete_function( )); } CollectionType::ByAuthor => { + delete_link_stmts.insert( + 0, + r#" + let record = match details {{ + Details::Record(details) => Ok(details.record), + _ => Err(wasm_error!(WasmErrorInner::Guest(String::from( + "Malformed get details response" + )))), + }}?; + "# + .to_string(), + ); + delete_link_stmts.insert(0, format!(r#" + let details = get_details(original_{snake_case_entry_type}_hash.clone(), GetOptions::default())? 
+ .ok_or(wasm_error!(WasmErrorInner::Guest(String::from("{{pascal_entry_def_name}} not found"))))?; + "#)); delete_link_stmts.push(format!( r#"let links = get_links( GetLinksInputBuilder::try_new(record.action().author().clone(), LinkTypes::{link_type_name})?.build() @@ -249,8 +265,8 @@ fn add_delete_link_in_delete_function( }; let stmts = delete_link_stmts - .into_iter() - .map(|s| syn::parse_str::(s.as_str())) + .iter() + .map(|s| syn::parse_str::(s)) .collect::, syn::Error>>()?; let crate_src_path = zome_file_tree.zome_crate_path.join("src"); @@ -270,8 +286,8 @@ fn add_delete_link_in_delete_function( file.items = file .items .into_iter() - .map(|i| { - if let syn::Item::Fn(mut item_fn) = i.clone() { + .map(|item| { + if let syn::Item::Fn(mut item_fn) = item.clone() { if item_fn .attrs .iter() @@ -288,7 +304,7 @@ fn add_delete_link_in_delete_function( } } - i + item }) .collect(); diff --git a/src/scaffold/entry_type.rs b/src/scaffold/entry_type.rs index a8ce41c51..d8c8afd92 100644 --- a/src/scaffold/entry_type.rs +++ b/src/scaffold/entry_type.rs @@ -95,11 +95,10 @@ pub fn scaffold_entry_type( .iter() .map(|s| s.to_os_string()) .collect(); - let empty_dir = dir! {}; choose_fields( name, &zome_file_tree, - template_file_tree.path(&mut v.iter()).unwrap_or(&empty_dir), + template_file_tree.path(&mut v.iter()).unwrap_or(&dir! {}), no_ui, )? 
} @@ -145,7 +144,7 @@ pub fn scaffold_entry_type( .filter_map(|f| f.linked_from.clone()) .collect(); - for l in linked_from.clone() { + for l in linked_from { zome_file_tree = add_link_type_to_integrity_zome( zome_file_tree, &link_type_name(&l, &entry_def.referenceable()), diff --git a/src/scaffold/entry_type/coordinator.rs b/src/scaffold/entry_type/coordinator.rs index b6aa31127..753741eda 100644 --- a/src/scaffold/entry_type/coordinator.rs +++ b/src/scaffold/entry_type/coordinator.rs @@ -25,28 +25,32 @@ pub fn no_update_read_handler(entry_def: &EntryDefinition) -> String { match entry_def.referenceable().hash_type() { FieldType::ActionHash => format!( - r#"#[hdk_extern] -pub fn get_{snake_entry_def_name}({snake_entry_def_name}_hash: {hash_type}) -> ExternResult> {{ - let Some(details) = get_details({snake_entry_def_name}_hash, GetOptions::default())? else {{ - return Ok(None); - }}; - match details {{ - Details::Record(details) => Ok(Some(details.record)), - _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), - }} -}}"#, + r#" + #[hdk_extern] + pub fn get_{snake_entry_def_name}({snake_entry_def_name}_hash: {hash_type}) -> ExternResult> {{ + let Some(details) = get_details({snake_entry_def_name}_hash, GetOptions::default())? else {{ + return Ok(None); + }}; + match details {{ + Details::Record(details) => Ok(Some(details.record)), + _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), + }} + }} + "#, ), FieldType::EntryHash => format!( - r#"#[hdk_extern] -pub fn get_{snake_entry_def_name}({snake_entry_def_name}_hash: {hash_type}) -> ExternResult> {{ - let Some(details) = get_details({snake_entry_def_name}_hash, GetOptions::default())? 
else {{ - return Ok(None); - }}; - match details {{ - Details::Entry(details) => Ok(Some(Record::new(details.actions[0].clone(), Some(details.entry)))), - _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), - }} -}}"#, + r#" + #[hdk_extern] + pub fn get_{snake_entry_def_name}({snake_entry_def_name}_hash: {hash_type}) -> ExternResult> {{ + let Some(details) = get_details({snake_entry_def_name}_hash, GetOptions::default())? else {{ + return Ok(None); + }}; + match details {{ + Details::Entry(details) => Ok(Some(Record::new(details.actions[0].clone(), Some(details.entry)))), + _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), + }} + }} + "#, ), _ => String::new(), } @@ -56,53 +60,54 @@ pub fn read_handler_without_linking_to_updates(entry_def: &EntryDefinition) -> S let snake_entry_def_name = entry_def.name.clone(); format!( - r#"#[hdk_extern] -pub fn get_original_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ - let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? else {{ - return Ok(None); - }}; - match details {{ - Details::Record(details) => Ok(Some(details.record)), - _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), - }} -}} - -#[hdk_extern] -pub fn get_latest_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ - let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? 
else {{ - return Ok(None); - }}; - - let record_details = match details {{ - Details::Entry(_) => Err(wasm_error!(WasmErrorInner::Guest( - "Malformed details".into() - ))), - Details::Record(record_details) => Ok(record_details) - }}?; - - match record_details.updates.last() {{ - Some(update) => get_latest_{snake_entry_def_name}(update.action_address().clone()), - None => Ok(Some(record_details.record)), - }} -}} + r#" + #[hdk_extern] + pub fn get_original_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ + let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? else {{ + return Ok(None); + }}; + match details {{ + Details::Record(details) => Ok(Some(details.record)), + _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), + }} + }} + + #[hdk_extern] + pub fn get_latest_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ + let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? else {{ + return Ok(None); + }}; + + let record_details = match details {{ + Details::Entry(_) => Err(wasm_error!(WasmErrorInner::Guest( + "Malformed details".into() + ))), + Details::Record(record_details) => Ok(record_details) + }}?; + + match record_details.updates.last() {{ + Some(update) => get_latest_{snake_entry_def_name}(update.action_address().clone()), + None => Ok(Some(record_details.record)), + }} + }} -#[hdk_extern] -pub fn get_all_revisions_for_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ - let Some(Details::Record(details)) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? 
else {{ - return Ok(vec![]); - }}; + #[hdk_extern] + pub fn get_all_revisions_for_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ + let Some(Details::Record(details)) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? else {{ + return Ok(vec![]); + }}; - let mut records = vec![details.record]; + let mut records = vec![details.record]; - for update in details.updates {{ - let mut update_records = get_all_revisions_for_{snake_entry_def_name}(update.action_address().clone())?; + for update in details.updates {{ + let mut update_records = get_all_revisions_for_{snake_entry_def_name}(update.action_address().clone())?; - records.append(&mut update_records); - }} + records.append(&mut update_records); + }} - Ok(records) -}} -"#, + Ok(records) + }} + "#, ) } @@ -110,66 +115,67 @@ pub fn updates_link_name(entry_def_name: &str) -> String { format!("{}Updates", entry_def_name.to_case(Case::Pascal)) } -pub fn read_handler_with_linking_to_updates(entry_def_name: &str) -> String { - let snake_entry_def_name = entry_def_name.to_case(Case::Snake); - format!( - r#"#[hdk_extern] -pub fn get_latest_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ - let links = get_links( - GetLinksInputBuilder::try_new(original_{snake_entry_def_name}_hash.clone(), LinkTypes::{})?.build(), - )?; - - let latest_link = links.into_iter().max_by(|link_a, link_b| link_a.timestamp.cmp(&link_b.timestamp)); - - let latest_{snake_entry_def_name}_hash = match latest_link {{ - Some(link) => link.target.clone().into_action_hash().ok_or(wasm_error!( - WasmErrorInner::Guest("No action hash associated with link".to_string()) - ))?, - None => original_{snake_entry_def_name}_hash.clone() - }}; - - get(latest_{snake_entry_def_name}_hash, GetOptions::default()) -}} - -#[hdk_extern] -pub fn get_original_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ - let 
Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? else {{ - return Ok(None); - }}; - match details {{ - Details::Record(details) => Ok(Some(details.record)), - _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), - }} -}} +pub fn read_handler_with_linking_to_updates(entry_def: &EntryDefinition) -> String { + let snake_entry_def_name = entry_def.snake_case_name(); + let updates_link_name = updates_link_name(&entry_def.name); -#[hdk_extern] -pub fn get_all_revisions_for_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ - let Some(original_record) = get_original_{snake_entry_def_name}(original_{snake_entry_def_name}_hash.clone())? else {{ - return Ok(vec![]); - }}; + format!( + r#" + #[hdk_extern] + pub fn get_latest_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ + let links = get_links( + GetLinksInputBuilder::try_new(original_{snake_entry_def_name}_hash.clone(), LinkTypes::{updates_link_name})?.build(), + )?; + + let latest_link = links.into_iter().max_by(|link_a, link_b| link_a.timestamp.cmp(&link_b.timestamp)); + + let latest_{snake_entry_def_name}_hash = match latest_link {{ + Some(link) => link.target.clone().into_action_hash().ok_or(wasm_error!( + WasmErrorInner::Guest("No action hash associated with link".to_string()) + ))?, + None => original_{snake_entry_def_name}_hash.clone() + }}; + + get(latest_{snake_entry_def_name}_hash, GetOptions::default()) + }} - let links = get_links( - GetLinksInputBuilder::try_new(original_{snake_entry_def_name}_hash.clone(), LinkTypes::{})?.build(), - )?; + #[hdk_extern] + pub fn get_original_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ + let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? 
else {{ + return Ok(None); + }}; + match details {{ + Details::Record(details) => Ok(Some(details.record)), + _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), + }} + }} - let get_input: Vec = links - .into_iter() - .map(|link| Ok(GetInput::new( - link.target.into_action_hash().ok_or(wasm_error!(WasmErrorInner::Guest("No action hash associated with link".to_string())))?.into(), - GetOptions::default(), - ))) - .collect::>>()?; - - // load the records for all the links - let records = HDK.with(|hdk| hdk.borrow().get(get_input))?; - let mut records: Vec = records.into_iter().flatten().collect(); - records.insert(0, original_record); - - Ok(records) -}} -"#, - updates_link_name(entry_def_name), - updates_link_name(entry_def_name), + #[hdk_extern] + pub fn get_all_revisions_for_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult> {{ + let Some(original_record) = get_original_{snake_entry_def_name}(original_{snake_entry_def_name}_hash.clone())? 
else {{ + return Ok(vec![]); + }}; + + let links = get_links( + GetLinksInputBuilder::try_new(original_{snake_entry_def_name}_hash.clone(), LinkTypes::{updates_link_name})?.build(), + )?; + + let get_input: Vec = links + .into_iter() + .map(|link| Ok(GetInput::new( + link.target.into_action_hash().ok_or(wasm_error!(WasmErrorInner::Guest("No action hash associated with link".to_string())))?.into(), + GetOptions::default(), + ))) + .collect::>>()?; + + // load the records for all the links + let records = HDK.with(|hdk| hdk.borrow().get(get_input))?; + let mut records: Vec = records.into_iter().flatten().collect(); + records.insert(0, original_record); + + Ok(records) + }} + "#, ) } @@ -179,41 +185,38 @@ pub fn create_link_for_cardinality( link_type_name: &str, cardinality: &Cardinality, ) -> String { + let snake_entry_def_name = entry_def.snake_case_name(); let link_target = match entry_def.reference_entry_hash { - true => format!("{}_entry_hash", entry_def.name.to_case(Case::Snake)), - false => format!("{}_hash", entry_def.name.to_case(Case::Snake)), + true => format!("{snake_entry_def_name}_entry_hash",), + false => format!("{snake_entry_def_name}_hash",), }; match cardinality { Cardinality::Single => format!( - r#" create_link({}.{}.clone(), {}.clone(), LinkTypes::{}, ())?;"#, - entry_def.name.to_case(Case::Snake), - field_name, - link_target, - link_type_name + r#" + create_link({snake_entry_def_name}.{field_name}.clone(), {link_target}.clone(), LinkTypes::{link_type_name}, ())?; + "#, ), Cardinality::Option => format!( - r#" if let Some(base) = {}.{}.clone() {{ - create_link(base, {}.clone(), LinkTypes::{}, ())?; - }}"#, - entry_def.name.to_case(Case::Snake), - field_name, - link_target, - link_type_name + r#" + if let Some(base) = {snake_entry_def_name}.{field_name}.clone() {{ + create_link(base, {link_target}.clone(), LinkTypes::{link_type_name}, ())?; + }} + "#, ), Cardinality::Vector => format!( - r#" for base in {}.{}.clone() {{ - create_link(base, 
{}.clone(), LinkTypes::{}, ())?; - }}"#, - entry_def.name.to_case(Case::Snake), - field_name, - link_target, - link_type_name + r#" + for base in {snake_entry_def_name}.{field_name}.clone() {{ + create_link(base, {link_target}.clone(), LinkTypes::{link_type_name}, ())?; + }} + "#, ), } } pub fn create_handler(entry_def: &EntryDefinition) -> String { + let snake_entry_def_name = entry_def.snake_case_name(); + let pascal_entry_def_name = entry_def.pascal_case_name(); let linked_from_count = entry_def .fields .iter() @@ -221,9 +224,7 @@ pub fn create_handler(entry_def: &EntryDefinition) -> String { .count(); let mut create_links_str: Vec = match entry_def.reference_entry_hash { true if linked_from_count > 0 => vec![format!( - r#"let {}_entry_hash = hash_entry(&{})?;"#, - entry_def.name.to_case(Case::Snake), - entry_def.name.to_case(Case::Snake) + "let {snake_entry_def_name}_entry_hash = hash_entry(&{snake_entry_def_name})?;", )], _ => vec![], }; @@ -242,104 +243,81 @@ pub fn create_handler(entry_def: &EntryDefinition) -> String { let create_links_str = create_links_str.join("\n\n"); format!( - r#"#[hdk_extern] -pub fn create_{}({}: {}) -> ExternResult {{ - let {}_hash = create_entry(&EntryTypes::{}({}.clone()))?; -{} - - let record = get({}_hash.clone(), GetOptions::default())? 
- .ok_or(wasm_error!(WasmErrorInner::Guest("Could not find the newly created {}".to_string())))?; - - Ok(record) -}} -"#, - entry_def.name.to_case(Case::Snake), - entry_def.name.to_case(Case::Snake), - entry_def.name.to_case(Case::Pascal), - entry_def.name.to_case(Case::Snake), - entry_def.name.to_case(Case::Pascal), - entry_def.name.to_case(Case::Snake), - create_links_str, - entry_def.name.to_case(Case::Snake), - entry_def.name.to_case(Case::Pascal) + r#" + #[hdk_extern] + pub fn create_{snake_entry_def_name}({snake_entry_def_name}: {pascal_entry_def_name}) -> ExternResult {{ + let {snake_entry_def_name}_hash = create_entry(&EntryTypes::{pascal_entry_def_name}({snake_entry_def_name}.clone()))?; + {create_links_str} + + let record = get({snake_entry_def_name}_hash.clone(), GetOptions::default())? + .ok_or(wasm_error!(WasmErrorInner::Guest("Could not find the newly created {pascal_entry_def_name}".to_string())))?; + Ok(record) + }} + "#, ) } -pub fn update_handler(entry_def_name: &str, link_from_original_to_each_update: bool) -> String { +pub fn update_handler( + entry_def: &EntryDefinition, + link_from_original_to_each_update: bool, +) -> String { match link_from_original_to_each_update { - true => update_handler_linking_on_each_update(entry_def_name), - false => update_handler_without_linking_on_each_update(entry_def_name), + true => update_handler_linking_on_each_update(entry_def), + false => update_handler_without_linking_on_each_update(entry_def), } } -pub fn update_handler_without_linking_on_each_update(entry_def_name: &str) -> String { +pub fn update_handler_without_linking_on_each_update(entry_def: &EntryDefinition) -> String { + let snake_entry_def_name = entry_def.snake_case_name(); + let pascal_entry_def_name = entry_def.pascal_case_name(); + format!( - r#"#[derive(Serialize, Deserialize, Debug)] -pub struct Update{}Input {{ - pub previous_{}_hash: ActionHash, - pub updated_{}: {} -}} - -#[hdk_extern] -pub fn update_{}(input: Update{}Input) -> 
ExternResult {{ - let updated_{}_hash = update_entry(input.previous_{}_hash, &input.updated_{})?; - - let record = get(updated_{}_hash.clone(), GetOptions::default())? - .ok_or(wasm_error!(WasmErrorInner::Guest("Could not find the newly updated {}".to_string())))?; - - Ok(record) -}} -"#, - entry_def_name.to_case(Case::Pascal), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Pascal), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Pascal), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Pascal) + r#" + #[derive(Serialize, Deserialize, Debug)] + pub struct Update{pascal_entry_def_name}Input {{ + pub previous_{snake_entry_def_name}_hash: ActionHash, + pub updated_{snake_entry_def_name}: {pascal_entry_def_name} + }} + + #[hdk_extern] + pub fn update_{snake_entry_def_name}(input: Update{pascal_entry_def_name}Input) -> ExternResult {{ + let updated_{snake_entry_def_name}_hash = update_entry(input.previous_{snake_entry_def_name}_hash, &input.updated_{snake_entry_def_name})?; + + let record = get(updated_{snake_entry_def_name}_hash.clone(), GetOptions::default())? 
+ .ok_or(wasm_error!(WasmErrorInner::Guest("Could not find the newly updated {pascal_entry_def_name}".to_string())))?; + + Ok(record) + }} + "#, ) } -pub fn update_handler_linking_on_each_update(entry_def_name: &str) -> String { +pub fn update_handler_linking_on_each_update(entry_def: &EntryDefinition) -> String { + let snake_entry_def_name = entry_def.snake_case_name(); + let pascal_entry_def_name = entry_def.pascal_case_name(); + let link_type_variant_name = updates_link_name(&entry_def.name); + format!( - r#"#[derive(Serialize, Deserialize, Debug)] -pub struct Update{}Input {{ - pub original_{}_hash: ActionHash, - pub previous_{}_hash: ActionHash, - pub updated_{}: {} -}} - -#[hdk_extern] -pub fn update_{}(input: Update{}Input) -> ExternResult {{ - let updated_{}_hash = update_entry(input.previous_{}_hash.clone(), &input.updated_{})?; - - create_link(input.original_{}_hash.clone(), updated_{}_hash.clone(), LinkTypes::{}, ())?; - - let record = get(updated_{}_hash.clone(), GetOptions::default())? 
- .ok_or(wasm_error!(WasmErrorInner::Guest("Could not find the newly updated {}".to_string())))?; - - Ok(record) -}} -"#, - entry_def_name.to_case(Case::Pascal), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Pascal), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Pascal), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Snake), - updates_link_name(entry_def_name), - entry_def_name.to_case(Case::Snake), - entry_def_name.to_case(Case::Pascal) + r#" + #[derive(Serialize, Deserialize, Debug)] + pub struct Update{pascal_entry_def_name}Input {{ + pub original_{snake_entry_def_name}_hash: ActionHash, + pub previous_{snake_entry_def_name}_hash: ActionHash, + pub updated_{snake_entry_def_name}: {pascal_entry_def_name} + }} + + #[hdk_extern] + pub fn update_{snake_entry_def_name}(input: Update{pascal_entry_def_name}Input) -> ExternResult {{ + let updated_{snake_entry_def_name}_hash = update_entry(input.previous_{snake_entry_def_name}_hash.clone(), &input.updated_{snake_entry_def_name})?; + + create_link(input.original_{snake_entry_def_name}_hash.clone(), updated_{snake_entry_def_name}_hash.clone(), LinkTypes::{link_type_variant_name}, ())?; + + let record = get(updated_{snake_entry_def_name}_hash.clone(), GetOptions::default())? + .ok_or(wasm_error!(WasmErrorInner::Guest("Could not find the newly updated {pascal_entry_def_name}".to_string())))?; + + Ok(record) + }} + "#, ) } @@ -355,31 +333,21 @@ pub fn delete_handler(entry_def: &EntryDefinition) -> String { .collect(); let delete_depending_links = match linked_from_fields.is_empty() { - true => format!( - r#" - let details = get_details(original_{snake_entry_def_name}_hash.clone(), GetOptions::default())? 
- .ok_or(wasm_error!(WasmErrorInner::Guest("{{pascal_entry_def_name}} not found".to_string())))?; - let record = match details {{ - Details::Record(details) => Ok(details.record), - _ => Err(wasm_error!(WasmErrorInner::Guest("Malformed get details response".to_string()))), - }}?; - "# - ), + true => Default::default(), false => { let mut delete_links = format!( r#" - let details = get_details(original_{snake_entry_def_name}_hash.clone(), GetOptions::default())? - .ok_or(wasm_error!(WasmErrorInner::Guest(String::from("{{pascal_entry_def_name}} not found"))))?; - let record = match details {{ - Details::Record(details) => Ok(details.record), - _ => Err(wasm_error!(WasmErrorInner::Guest(String::from( - "Malformed get details response" - )))), - }}?; - let entry = record.entry().as_option().ok_or(wasm_error!(WasmErrorInner::Guest("{pascal_entry_def_name} record has no entry".to_string())))?; - let {snake_entry_def_name} = {pascal_entry_def_name}::try_from(entry)?; - - "# + let details = get_details(original_{snake_entry_def_name}_hash.clone(), GetOptions::default())? 
+ .ok_or(wasm_error!(WasmErrorInner::Guest(String::from("{{pascal_entry_def_name}} not found"))))?; + let record = match details {{ + Details::Record(details) => Ok(details.record), + _ => Err(wasm_error!(WasmErrorInner::Guest(String::from( + "Malformed get details response" + )))), + }}?; + let entry = record.entry().as_option().ok_or(wasm_error!(WasmErrorInner::Guest("{pascal_entry_def_name} record has no entry".to_string())))?; + let {snake_entry_def_name} = {pascal_entry_def_name}::try_from(entry)?; + "# ); for linked_from_field in linked_from_fields { let linked_from = linked_from_field @@ -390,49 +358,49 @@ pub fn delete_handler(entry_def: &EntryDefinition) -> String { let delete_this_link = match linked_from_field.cardinality { Cardinality::Single => format!( r#" - let links = get_links( - GetLinksInputBuilder::try_new({snake_entry_def_name}.{field_name}.clone(), LinkTypes::{link_type})?.build(), - )?; - for link in links {{ - if let Some(action_hash) = link.target.into_action_hash() {{ - if action_hash.eq(&original_{snake_entry_def_name}_hash) {{ - delete_link(link.create_link_hash)?; - }} - }} - }} - "# + let links = get_links( + GetLinksInputBuilder::try_new({snake_entry_def_name}.{field_name}.clone(), LinkTypes::{link_type})?.build(), + )?; + for link in links {{ + if let Some(action_hash) = link.target.into_action_hash() {{ + if action_hash.eq(&original_{snake_entry_def_name}_hash) {{ + delete_link(link.create_link_hash)?; + }} + }} + }} + "# ), Cardinality::Option => format!( r#" - if let Some(base_address) = {snake_entry_def_name}.{field_name}.clone() {{ - let links = get_links( - GetLinksInputBuilder::try_new(base_address, LinkTypes::{link_type})?.build(), - )?; - for link in links {{ - if let Some(action_hash) = link.target.into_action_hash() {{ - if action_hash.eq(&original_{snake_entry_def_name}_hash) {{ - delete_link(link.create_link_hash)?; - }} - }} - }} - }} - "# + if let Some(base_address) = {snake_entry_def_name}.{field_name}.clone() {{ + 
let links = get_links( + GetLinksInputBuilder::try_new(base_address, LinkTypes::{link_type})?.build(), + )?; + for link in links {{ + if let Some(action_hash) = link.target.into_action_hash() {{ + if action_hash.eq(&original_{snake_entry_def_name}_hash) {{ + delete_link(link.create_link_hash)?; + }} + }} + }} + }} + "# ), Cardinality::Vector => format!( r#" - for base_address in {snake_entry_def_name}.{field_name} {{ - let links = get_links( - GetLinksInputBuilder::try_new(base_address.clone(), LinkTypes::{link_type})?.build(), - )?; - for link in links {{ - if let Some(action_hash) = link.target.into_action_hash() {{ - if action_hash.eq(&original_{snake_entry_def_name}_hash) {{ - delete_link(link.create_link_hash)?; - }} - }} - }} - }} - "# + for base_address in {snake_entry_def_name}.{field_name} {{ + let links = get_links( + GetLinksInputBuilder::try_new(base_address.clone(), LinkTypes::{link_type})?.build(), + )?; + for link in links {{ + if let Some(action_hash) = link.target.into_action_hash() {{ + if action_hash.eq(&original_{snake_entry_def_name}_hash) {{ + delete_link(link.create_link_hash)?; + }} + }} + }} + }} + "# ), }; delete_links.push_str(delete_this_link.as_str()); @@ -441,41 +409,42 @@ pub fn delete_handler(entry_def: &EntryDefinition) -> String { } }; format!( - r#"#[hdk_extern] -pub fn delete_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult {{ - {delete_depending_links} - delete_entry(original_{snake_entry_def_name}_hash) -}} - -#[hdk_extern] -pub fn get_all_deletes_for_{snake_entry_def_name}( - original_{snake_entry_def_name}_hash: ActionHash, -) -> ExternResult>> {{ - let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? 
else {{ - return Ok(None); - }}; - - match details {{ - Details::Entry(_) => Err(wasm_error!(WasmErrorInner::Guest( - "Malformed details".into() - ))), - Details::Record(record_details) => Ok(Some(record_details.deletes)), - }} -}} + r#" + #[hdk_extern] + pub fn delete_{snake_entry_def_name}(original_{snake_entry_def_name}_hash: ActionHash) -> ExternResult {{ + {delete_depending_links} + delete_entry(original_{snake_entry_def_name}_hash) + }} + + #[hdk_extern] + pub fn get_all_deletes_for_{snake_entry_def_name}( + original_{snake_entry_def_name}_hash: ActionHash, + ) -> ExternResult>> {{ + let Some(details) = get_details(original_{snake_entry_def_name}_hash, GetOptions::default())? else {{ + return Ok(None); + }}; + + match details {{ + Details::Entry(_) => Err(wasm_error!(WasmErrorInner::Guest( + "Malformed details".into() + ))), + Details::Record(record_details) => Ok(Some(record_details.deletes)), + }} + }} -#[hdk_extern] -pub fn get_oldest_delete_for_{snake_entry_def_name}( - original_{snake_entry_def_name}_hash: ActionHash, -) -> ExternResult> {{ - let Some(mut deletes) = get_all_deletes_for_{snake_entry_def_name}(original_{snake_entry_def_name}_hash)? else {{ - return Ok(None); - }}; + #[hdk_extern] + pub fn get_oldest_delete_for_{snake_entry_def_name}( + original_{snake_entry_def_name}_hash: ActionHash, + ) -> ExternResult> {{ + let Some(mut deletes) = get_all_deletes_for_{snake_entry_def_name}(original_{snake_entry_def_name}_hash)? 
else {{ + return Ok(None); + }}; - deletes.sort_by(|delete_a, delete_b| delete_a.action().timestamp().cmp(&delete_b.action().timestamp())); + deletes.sort_by(|delete_a, delete_b| delete_a.action().timestamp().cmp(&delete_b.action().timestamp())); - Ok(deletes.first().cloned()) -}} -"#, + Ok(deletes.first().cloned()) + }} + "#, ) } @@ -486,11 +455,12 @@ fn initial_crud_handlers( link_from_original_to_each_update: bool, ) -> String { let mut initial = format!( - r#"use hdk::prelude::*; -use {}::*; + r#" + use hdk::prelude::*; + use {}::*; -{} -"#, + {} + "#, integrity_zome_name, create_handler(entry_def) ); @@ -498,14 +468,15 @@ use {}::*; if !crud.update { initial.push_str(no_update_read_handler(entry_def).as_str()); } else if link_from_original_to_each_update { - initial.push_str(read_handler_with_linking_to_updates(&entry_def.name).as_str()); + initial.push_str(read_handler_with_linking_to_updates(entry_def).as_str()); } else { initial.push_str(read_handler_without_linking_to_updates(entry_def).as_str()); } + if crud.update { - initial - .push_str(update_handler(&entry_def.name, link_from_original_to_each_update).as_str()); + initial.push_str(update_handler(entry_def, link_from_original_to_each_update).as_str()); } + if crud.delete { initial.push_str(delete_handler(entry_def).as_str()); } @@ -544,23 +515,29 @@ fn signal_action_has_entry_types(expr_match: &syn::ExprMatch) -> bool { fn signal_entry_types_variants() -> ScaffoldResult> { Ok(vec![ syn::parse_str::( - "EntryCreated { - action: SignedActionHashed, - app_entry: EntryTypes, - }", + r#" + EntryCreated { + action: SignedActionHashed, + app_entry: EntryTypes, + } + "#, )?, syn::parse_str::( - "EntryUpdated { - action: SignedActionHashed, - app_entry: EntryTypes, - original_app_entry: EntryTypes, - }", + r#" + EntryUpdated { + action: SignedActionHashed, + app_entry: EntryTypes, + original_app_entry: EntryTypes, + } + "#, )?, syn::parse_str::( - "EntryDeleted { - action: SignedActionHashed, - 
original_app_entry: EntryTypes, - }", + r#" + EntryDeleted { + action: SignedActionHashed, + original_app_entry: EntryTypes, + } + "#, )?, ]) } @@ -614,6 +591,7 @@ pub fn add_crud_functions_to_coordinator( ) -> ScaffoldResult { let dna_manifest_path = zome_file_tree.dna_file_tree.dna_manifest_path.clone(); let zome_manifest = zome_file_tree.zome_manifest.clone(); + let entry_def_snake_case_name = entry_def.snake_case_name(); // 1. Create an ENTRY_DEF_NAME.rs in "src/", with the appropriate crud functions let crate_src_path = zome_file_tree.zome_crate_path.join("src"); @@ -621,7 +599,7 @@ pub fn add_crud_functions_to_coordinator( let mut file_tree = zome_file_tree.dna_file_tree.file_tree(); insert_file( &mut file_tree, - &crate_src_path.join(format!("{}.rs", entry_def.name.to_case(Case::Snake))), + &crate_src_path.join(format!("{}.rs", &entry_def_snake_case_name)), &initial_crud_handlers( integrity_zome_name, entry_def, @@ -631,24 +609,23 @@ pub fn add_crud_functions_to_coordinator( )?; // 2. Add this file as a module in the entry point for the crate - let lib_rs_path = crate_src_path.join("lib.rs"); - map_file(&mut file_tree, &lib_rs_path, |s| { + map_file(&mut file_tree, &lib_rs_path, |contents| { Ok(format!( - r#"pub mod {}; + r#" + pub mod {}; -{}"#, - entry_def.name.to_case(Case::Snake), - s + {contents} + "#, + &entry_def_snake_case_name )) })?; - let v: Vec = crate_src_path - .clone() + let v = crate_src_path .iter() .map(|s| s.to_os_string()) - .collect(); + .collect::>(); map_rust_files( file_tree .path_mut(&mut v.iter()) @@ -659,9 +636,7 @@ pub fn add_crud_functions_to_coordinator( for item in &mut file.items { if let syn::Item::Enum(item_enum) = item { - if item_enum.ident.to_string().eq(&String::from("Signal")) - && !signal_has_entry_types(item_enum) - { + if item_enum.ident.eq("Signal") && !signal_has_entry_types(item_enum) { first_entry_type_scaffolded = true; for v in signal_entry_types_variants()? 
{ item_enum.variants.push(v); @@ -670,12 +645,7 @@ pub fn add_crud_functions_to_coordinator( } if let syn::Item::Fn(item_fn) = item { - if item_fn - .sig - .ident - .to_string() - .eq(&String::from("signal_action")) - { + if item_fn.sig.ident.eq("signal_action") { if find_ending_match_expr_in_block(&mut item_fn.block).is_none() { item_fn.block = Box::new(syn::parse_str::( "{ match action.hashed.content.clone() { _ => Ok(()) } }", @@ -696,27 +666,31 @@ pub fn add_crud_functions_to_coordinator( } if first_entry_type_scaffolded { - file.items.push(syn::parse_str::("fn get_entry_for_action(action_hash: &ActionHash) -> ExternResult> { - let record = match get_details(action_hash.clone(), GetOptions::default())? { - Some(Details::Record(record_details)) => record_details.record, - _ => { return Ok(None); } - }; - let entry = match record.entry().as_option() { - Some(entry) => entry, - None => { return Ok(None); } - }; - - let (zome_index, entry_index) = match record.action().entry_type() { - Some(EntryType::App(AppEntryDef { - zome_index, - entry_index, - .. - })) => (zome_index, entry_index), - _ => { return Ok(None); } - }; - - EntryTypes::deserialize_from_type(*zome_index, *entry_index, entry) -}")?); + file.items.push(syn::parse_str::( + r#" + fn get_entry_for_action(action_hash: &ActionHash) -> ExternResult> { + let record = match get_details(action_hash.clone(), GetOptions::default())? { + Some(Details::Record(record_details)) => record_details.record, + _ => { return Ok(None); } + }; + let entry = match record.entry().as_option() { + Some(entry) => entry, + None => { return Ok(None); } + }; + + let (zome_index, entry_index) = match record.action().entry_type() { + Some(EntryType::App(AppEntryDef { + zome_index, + entry_index, + .. 
+ })) => (zome_index, entry_index), + _ => { return Ok(None); } + }; + + EntryTypes::deserialize_from_type(*zome_index, *entry_index, entry) + } + "#, + )?); } } Ok(file) diff --git a/src/scaffold/entry_type/definitions.rs b/src/scaffold/entry_type/definitions.rs index 28744ec7a..d00af4b98 100644 --- a/src/scaffold/entry_type/definitions.rs +++ b/src/scaffold/entry_type/definitions.rs @@ -1,13 +1,11 @@ +use std::str::FromStr; + use convert_case::{Case, Casing}; use proc_macro2::TokenStream; use quote::{format_ident, quote}; use serde::{ser::SerializeStruct, Deserialize, Serialize, Serializer}; -use crate::{ - error::{ScaffoldError, ScaffoldResult}, - reserved_words::check_for_reserved_words, - utils::check_case, -}; +use crate::{error::ScaffoldError, reserved_words::check_for_reserved_words, utils::check_case}; #[derive(Deserialize, Debug, Clone, Serialize)] #[serde(tag = "type")] @@ -95,6 +93,7 @@ impl FieldType { pub fn rust_type(&self) -> TokenStream { use FieldType::*; + match self { Bool => quote!(bool), String => quote!(String), @@ -230,25 +229,29 @@ impl EntryTypeReference { } } -pub fn parse_entry_type_reference(s: &str) -> ScaffoldResult { - let sp: Vec<&str> = s.split(':').collect(); - check_case(sp[0], "entry type reference", Case::Snake)?; - - let reference_entry_hash = match sp.len() { - 0 | 1 => false, - _ => match sp[1] { - "EntryHash" => true, - "ActionHash" => false, - _ => Err(ScaffoldError::InvalidArguments(String::from( - "second argument for reference type must be \"EntryHash\" or \"ActionHash\"", - )))?, - }, - }; - - Ok(EntryTypeReference { - entry_type: sp[0].to_string().to_case(Case::Pascal), - reference_entry_hash, - }) +impl FromStr for EntryTypeReference { + type Err = ScaffoldError; + + fn from_str(s: &str) -> Result { + let sp: Vec<&str> = s.split(':').collect(); + check_case(sp[0], "entry type reference", Case::Snake)?; + + let reference_entry_hash = match sp.len() { + 0 | 1 => false, + _ => match sp[1] { + "EntryHash" => true, + 
"ActionHash" => false, + _ => Err(ScaffoldError::InvalidArguments(String::from( + "second argument for reference type must be \"EntryHash\" or \"ActionHash\"", + )))?, + }, + }; + + Ok(EntryTypeReference { + entry_type: sp[0].to_string().to_case(Case::Pascal), + reference_entry_hash, + }) + } } #[derive(Clone, Debug)] @@ -270,22 +273,26 @@ impl Serialize for Referenceable { } } -pub fn parse_referenceable(s: &str) -> ScaffoldResult { - let sp: Vec<&str> = s.split(':').collect(); +impl FromStr for Referenceable { + type Err = ScaffoldError; - check_case(sp[0], "referenceable", Case::Snake)?; + fn from_str(s: &str) -> Result { + let sp: Vec<&str> = s.split(':').collect(); - Ok(match sp[0] { - "agent" => match sp.len() { - 0 | 1 => Referenceable::Agent { - role: String::from("agent"), - }, - _ => Referenceable::Agent { - role: sp[1].to_string(), + check_case(sp[0], "referenceable", Case::Snake)?; + + Ok(match sp[0] { + "agent" => match sp.len() { + 0 | 1 => Referenceable::Agent { + role: String::from("agent"), + }, + _ => Referenceable::Agent { + role: sp[1].to_string(), + }, }, - }, - _ => Referenceable::EntryType(parse_entry_type_reference(s)?), - }) + _ => Referenceable::EntryType(EntryTypeReference::from_str(s)?), + }) + } } impl Referenceable { @@ -332,4 +339,16 @@ impl EntryDefinition { reference_entry_hash: self.reference_entry_hash, }) } + + pub fn snake_case_name(&self) -> String { + self.name.to_case(Case::Snake) + } + + pub fn pascal_case_name(&self) -> String { + self.name.to_case(Case::Pascal) + } + + pub fn camel_case_name(&self) -> String { + self.name.to_case(Case::Camel) + } } diff --git a/src/scaffold/link_type/coordinator.rs b/src/scaffold/link_type/coordinator.rs index e9694982c..46499ef33 100644 --- a/src/scaffold/link_type/coordinator.rs +++ b/src/scaffold/link_type/coordinator.rs @@ -34,37 +34,40 @@ fn metadata_handlers( let pascal_link_type_name = link_type_name.to_case(Case::Pascal); format!( - r#"use hdk::prelude::*; -use 
{integrity_zome_name}::*; - -#[derive(Serialize, Deserialize, Debug)] -pub struct Add{pascal_link_type_name}For{pascal_from}Input {{ - pub {snake_from_arg}: {from_arg_type}, - pub {snake_link_type_name}: String, -}} -#[hdk_extern] -pub fn add_{snake_link_type_name}_for_{snake_from}(input: Add{pascal_link_type_name}For{pascal_from}Input) -> ExternResult<()> {{ - create_link(input.{snake_from_arg}.clone(), input.{snake_from_arg}, LinkTypes::{pascal_link_type_name}, input.{snake_link_type_name})?; - - Ok(()) -}} - -#[hdk_extern] -pub fn get_{plural_snake_link_type_name}_for_{snake_from}({snake_from_arg}: {from_arg_type}) -> ExternResult> {{ - let links = get_links( - GetLinksInputBuilder::try_new({snake_from_arg}, LinkTypes::{pascal_link_type_name})?.build(), - )?; - - let {snake_link_type_name}: Vec = links - .into_iter() - .map(|link| - String::from_utf8(link.tag.into_inner()) - .map_err(|e| wasm_error!(WasmErrorInner::Guest(format!("Error converting link tag to string: {{:?}}", e)))) - ) - .collect::>>()?; + r#" + use hdk::prelude::*; + use {integrity_zome_name}::*; + + #[derive(Serialize, Deserialize, Debug)] + pub struct Add{pascal_link_type_name}For{pascal_from}Input {{ + pub {snake_from_arg}: {from_arg_type}, + pub {snake_link_type_name}: String, + }} + + #[hdk_extern] + pub fn add_{snake_link_type_name}_for_{snake_from}(input: Add{pascal_link_type_name}For{pascal_from}Input) -> ExternResult<()> {{ + create_link(input.{snake_from_arg}.clone(), input.{snake_from_arg}, LinkTypes::{pascal_link_type_name}, input.{snake_link_type_name})?; - Ok({snake_link_type_name}) -}}"# + Ok(()) + }} + + #[hdk_extern] + pub fn get_{plural_snake_link_type_name}_for_{snake_from}({snake_from_arg}: {from_arg_type}) -> ExternResult> {{ + let links = get_links( + GetLinksInputBuilder::try_new({snake_from_arg}, LinkTypes::{pascal_link_type_name})?.build(), + )?; + + let {snake_link_type_name}: Vec = links + .into_iter() + .map(|link| + String::from_utf8(link.tag.into_inner()) + 
.map_err(|e| wasm_error!(WasmErrorInner::Guest(format!("Error converting link tag to string: {{:?}}", e)))) + ) + .collect::>>()?; + + Ok({snake_link_type_name}) + }} + "# ) } @@ -93,26 +96,26 @@ pub fn add_link_handler( .to_string(&Cardinality::Single) .to_case(Case::Pascal); - let bidirectional_create = match bidirectional { - true => format!( - r#"create_link(input.target_{to_arg_name}, input.base_{from_arg_name}, LinkTypes::{inverse_link_type_name}, ())?;"# - ), - false => String::new(), - }; + let bidirectional_create = bidirectional.then_some( + format!("create_link(input.target_{to_arg_name}, input.base_{from_arg_name}, LinkTypes::{inverse_link_type_name}, ())?;") + ).unwrap_or_default(); format!( - r#"#[derive(Serialize, Deserialize, Debug)] -pub struct Add{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input {{ - pub base_{from_arg_name}: {from_hash_type}, - pub target_{to_arg_name}: {to_hash_type}, -}} -#[hdk_extern] -pub fn add_{singular_snake_to_entry_type}_for_{singular_snake_from_entry_type}(input: Add{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input) -> ExternResult<()> {{ - create_link(input.base_{from_arg_name}.clone(), input.target_{to_arg_name}.clone(), LinkTypes::{normal_link_type_name}, ())?; - {bidirectional_create} - - Ok(()) -}}"# + r#" + #[derive(Serialize, Deserialize, Debug)] + pub struct Add{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input {{ + pub base_{from_arg_name}: {from_hash_type}, + pub target_{to_arg_name}: {to_hash_type}, + }} + + #[hdk_extern] + pub fn add_{singular_snake_to_entry_type}_for_{singular_snake_from_entry_type}(input: Add{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input) -> ExternResult<()> {{ + create_link(input.base_{from_arg_name}.clone(), input.target_{to_arg_name}.clone(), LinkTypes::{normal_link_type_name}, ())?; + {bidirectional_create} + + Ok(()) + }} + "# ) } @@ -145,39 +148,39 @@ fn get_links_handler_to_agent( 
.to_string(&Cardinality::Vector) .to_case(Case::Snake); - let get_deleted_links_handler = if delete { - format!( + let get_deleted_links_handler = delete + .then_some(format!( r#" -#[hdk_extern] -pub fn get_deleted_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}( - {from_arg_name}: {from_hash_type}, -) -> ExternResult)>> {{ - let details = get_link_details( - {from_arg_name}, - LinkTypes::{pascal_link_type_name}, - None, - GetOptions::default(), - )?; - Ok(details - .into_inner() - .into_iter() - .filter(|(_link, deletes)| !deletes.is_empty()) - .collect()) -}}"# - ) - } else { - String::new() - }; + #[hdk_extern] + pub fn get_deleted_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}( + {from_arg_name}: {from_hash_type}, + ) -> ExternResult)>> {{ + let details = get_link_details( + {from_arg_name}, + LinkTypes::{pascal_link_type_name}, + None, + GetOptions::default(), + )?; + Ok(details + .into_inner() + .into_iter() + .filter(|(_link, deletes)| !deletes.is_empty()) + .collect()) + }} + "# + )) + .unwrap_or_default(); format!( - r#"#[hdk_extern] -pub fn get_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}({from_arg_name}: {from_hash_type}) -> ExternResult> {{ - get_links( - GetLinksInputBuilder::try_new({from_arg_name}, LinkTypes::{pascal_link_type_name})?.build(), - ) -}} -{get_deleted_links_handler} -"#, + r#" + #[hdk_extern] + pub fn get_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}({from_arg_name}: {from_hash_type}) -> ExternResult> {{ + get_links( + GetLinksInputBuilder::try_new({from_arg_name}, LinkTypes::{pascal_link_type_name})?.build(), + ) + }} + {get_deleted_links_handler} + "#, ) } @@ -200,38 +203,39 @@ fn get_links_handler_to_entry( .to_string(&Cardinality::Vector) .to_case(Case::Snake); - let get_deleted_links_handler = match delete { - true => format!( + let get_deleted_links_handler = delete + .then_some(format!( r#" -#[hdk_extern] -pub fn 
get_deleted_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}( - {from_arg_name}: {from_hash_type}, -) -> ExternResult)>> {{ - let details = get_link_details( - {from_arg_name}, - LinkTypes::{pascal_link_type_name}, - None, - GetOptions::default(), - )?; - Ok(details - .into_inner() - .into_iter() - .filter(|(_link, deletes)| !deletes.is_empty()) - .collect()) -}}"# - ), - false => String::new(), - }; + #[hdk_extern] + pub fn get_deleted_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}( + {from_arg_name}: {from_hash_type}, + ) -> ExternResult)>> {{ + let details = get_link_details( + {from_arg_name}, + LinkTypes::{pascal_link_type_name}, + None, + GetOptions::default(), + )?; + Ok(details + .into_inner() + .into_iter() + .filter(|(_link, deletes)| !deletes.is_empty()) + .collect()) + }} + "# + )) + .unwrap_or_default(); format!( - r#"#[hdk_extern] -pub fn get_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}({from_arg_name}: {from_hash_type}) -> ExternResult> {{ - get_links( - GetLinksInputBuilder::try_new({from_arg_name}, LinkTypes::{pascal_link_type_name})?.build(), - ) -}} -{get_deleted_links_handler} -"#, + r#" + #[hdk_extern] + pub fn get_{plural_snake_to_entry_type}_for_{singular_snake_from_entry_type}({from_arg_name}: {from_hash_type}) -> ExternResult> {{ + get_links( + GetLinksInputBuilder::try_new({from_arg_name}, LinkTypes::{pascal_link_type_name})?.build(), + ) + }} + {get_deleted_links_handler} + "#, ) } @@ -274,44 +278,46 @@ fn remove_link_handlers( let from_link = from_link_hash_type(&to_hash_type); let from_inverse = from_link_hash_type(&from_hash_type); - let bidirectional_remove = match bidirectional { - true => format!( + let bidirectional_remove = bidirectional.then_some( + format!( r#" - let links = get_links( - GetLinksInputBuilder::try_new(input.target_{to_arg_name}.clone(), LinkTypes::{inverse_link_type_name})?.build(), - )?; - - for link in links {{ - if 
{from_inverse}.eq(&input.base_{from_arg_name}) {{ - delete_link(link.create_link_hash)?; - }} - }}"# - ), - false => String::new(), - }; + let links = get_links( + GetLinksInputBuilder::try_new(input.target_{to_arg_name}.clone(), LinkTypes::{inverse_link_type_name})?.build(), + )?; + + for link in links {{ + if {from_inverse}.eq(&input.base_{from_arg_name}) {{ + delete_link(link.create_link_hash)?; + }} + }} + "# + ) + ).unwrap_or_default(); format!( - r#"#[derive(Serialize, Deserialize, Debug)] -pub struct Remove{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input {{ - pub base_{from_arg_name}: {from_hash_type}, - pub target_{to_arg_name}: {to_hash_type}, -}} -#[hdk_extern] -pub fn remove_{singular_snake_to_entry_type}_for_{singular_snake_from_entry_type}(input: Remove{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input ) -> ExternResult<()> {{ - let links = get_links( - GetLinksInputBuilder::try_new(input.base_{from_arg_name}.clone(), LinkTypes::{pascal_link_type_name})?.build(), - )?; - - for link in links {{ - if {from_link}.eq(&input.target_{to_arg_name}) {{ - delete_link(link.create_link_hash)?; + r#" + #[derive(Serialize, Deserialize, Debug)] + pub struct Remove{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input {{ + pub base_{from_arg_name}: {from_hash_type}, + pub target_{to_arg_name}: {to_hash_type}, }} - }} - {bidirectional_remove} - Ok(()) -}} -"# + #[hdk_extern] + pub fn remove_{singular_snake_to_entry_type}_for_{singular_snake_from_entry_type}(input: Remove{singular_pascal_to_entry_type}For{singular_pascal_from_entry_type}Input ) -> ExternResult<()> {{ + let links = get_links( + GetLinksInputBuilder::try_new(input.base_{from_arg_name}.clone(), LinkTypes::{pascal_link_type_name})?.build(), + )?; + + for link in links {{ + if {from_link}.eq(&input.target_{to_arg_name}) {{ + delete_link(link.create_link_hash)?; + }} + }} + {bidirectional_remove} + + Ok(()) + }} + "# ) } @@ -322,35 +328,37 @@ fn 
normal_handlers( delete: bool, bidirectional: bool, ) -> String { - let inverse_get = match bidirectional { - true => format!( - r#" + let inverse_get = bidirectional + .then_some(get_links_handler( + to_referenceable, + from_referenceable, + delete, + )) + .unwrap_or_default(); -{}"#, - get_links_handler(to_referenceable, from_referenceable, delete) - ), - false => String::new(), - }; + let delete_link_handler = delete + .then_some(remove_link_handlers( + from_referenceable, + to_referenceable, + bidirectional, + )) + .unwrap_or_default(); - let delete_link_handler = match delete { - true => remove_link_handlers(from_referenceable, to_referenceable, bidirectional), - false => String::new(), - }; + let add_links_handler = add_link_handler(from_referenceable, to_referenceable, bidirectional); + let get_links_handler = get_links_handler(from_referenceable, to_referenceable, delete); format!( - r#"use hdk::prelude::*; -use {integrity_zome_name}::*; + r#" + use hdk::prelude::*; + use {integrity_zome_name}::*; -{} + {add_links_handler} -{} -{} + {get_links_handler} + {inverse_get} -{}"#, - add_link_handler(from_referenceable, to_referenceable, bidirectional), - get_links_handler(from_referenceable, to_referenceable, delete), - inverse_get, - delete_link_handler + {delete_link_handler} + "#, ) } @@ -396,12 +404,13 @@ pub fn add_link_type_functions_to_coordinator( // 2. 
Add this file as a module in the entry point for the crate - map_file(&mut file_tree, &lib_rs_path, |file| { + map_file(&mut file_tree, &lib_rs_path, |contents| { Ok(format!( - r#"pub mod {}; + r#" + pub mod {snake_link_type_name}; -{}"#, - snake_link_type_name, file + {contents} + "#, )) })?; diff --git a/src/scaffold/web_app.rs b/src/scaffold/web_app.rs index 3120de47d..61c3f57b8 100644 --- a/src/scaffold/web_app.rs +++ b/src/scaffold/web_app.rs @@ -42,15 +42,9 @@ fn web_app_skeleton( .insert(OsString::from("flake.nix"), flake_nix(holo_enabled)); } - let mut scaffold_template_result = + let scaffold_template_result = scaffold_web_app_template(app_file_tree, template_file_tree, &app_name, holo_enabled)?; - scaffold_template_result - .file_tree - .dir_content_mut() - .unwrap() - .insert(OsString::from("dnas"), dir! {}); - Ok(scaffold_template_result) } diff --git a/src/scaffold/web_app/uis.rs b/src/scaffold/web_app/uis.rs index 9e0f2da71..0427882c3 100644 --- a/src/scaffold/web_app/uis.rs +++ b/src/scaffold/web_app/uis.rs @@ -1,3 +1,4 @@ +use colored::Colorize; use dialoguer::{theme::ColorfulTheme, Select}; use include_dir::{include_dir, Dir}; use std::{ffi::OsString, path::PathBuf, str::FromStr}; @@ -11,22 +12,37 @@ static LIT_TEMPLATES: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/templates static SVELTE_TEMPLATES: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/templates/svelte"); static VUE_TEMPLATES: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/templates/vue"); static VANILLA_TEMPLATES: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/templates/vanilla"); +static HEADLESS_TEMPLATE: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/templates/headless"); -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] pub enum UiFramework { Vanilla, Lit, Svelte, Vue, + Headless, } impl UiFramework { + /// Gets the non-ANSI escaped name of the ui framework + pub fn name(&self) -> String { + let name = match self { + UiFramework::Vanilla => "vanilla", + 
UiFramework::Lit => "lit", + UiFramework::Svelte => "svelte", + UiFramework::Vue => "vue", + UiFramework::Headless => "headless", + }; + name.to_string() + } + pub fn template_filetree(&self) -> ScaffoldResult { let dir = match self { UiFramework::Lit => &LIT_TEMPLATES, UiFramework::Vanilla => &VANILLA_TEMPLATES, UiFramework::Svelte => &SVELTE_TEMPLATES, UiFramework::Vue => &VUE_TEMPLATES, + UiFramework::Headless => &HEADLESS_TEMPLATE, }; dir_to_file_tree(dir) } @@ -37,29 +53,32 @@ impl UiFramework { UiFramework::Svelte, UiFramework::Vue, UiFramework::Vanilla, + UiFramework::Headless, ]; let selection = Select::with_theme(&ColorfulTheme::default()) - .with_prompt("Choose UI framework:") + .with_prompt("Choose UI framework: (Use arrow-keys. Return to submit)") .default(0) .items(&frameworks[..]) .interact()?; - Ok(frameworks[selection]) + Ok(frameworks[selection].clone()) } pub fn choose_non_vanilla() -> ScaffoldResult { let frameworks = [UiFramework::Lit, UiFramework::Svelte, UiFramework::Vue]; let selection = Select::with_theme(&ColorfulTheme::default()) - .with_prompt("Choose UI framework:") + .with_prompt("Choose UI framework: (Use arrow-keys. Return to submit)") .default(0) .items(&frameworks[..]) .interact()?; - Ok(frameworks[selection]) + Ok(frameworks[selection].clone()) } } impl TryFrom<&FileTree> for UiFramework { type Error = ScaffoldError; + /// Try to get ui framework from app file tree, if the ui framework cannot be inferred, then + /// the user will be prompted to choose one via `UiFramework::choose` fn try_from(app_file_tree: &FileTree) -> Result { let ui_package_json_path = PathBuf::from("ui/package.json"); if file_exists(app_file_tree, &ui_package_json_path) { @@ -71,8 +90,8 @@ impl TryFrom<&FileTree> for UiFramework { .path(&mut v.iter()) .ok_or(ScaffoldError::PathNotFound(ui_package_json_path.clone()))? .file_content() - .ok_or(ScaffoldError::PathNotFound(ui_package_json_path.clone()))? 
- .clone(); + .map(|c| c.to_owned()) + .ok_or(ScaffoldError::PathNotFound(ui_package_json_path.clone()))?; if ui_package_json.contains("lit") { return Ok(UiFramework::Lit); } else if ui_package_json.contains("svelte") { @@ -90,10 +109,11 @@ impl TryFrom<&FileTree> for UiFramework { impl std::fmt::Display for UiFramework { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let str = match self { - UiFramework::Vanilla => "vanilla", - UiFramework::Lit => "lit", - UiFramework::Svelte => "svelte", - UiFramework::Vue => "vue", + UiFramework::Vanilla => "vanilla".yellow(), + UiFramework::Lit => "lit".bright_blue(), + UiFramework::Svelte => "svelte".bright_red(), + UiFramework::Vue => "vue".green(), + UiFramework::Headless => "headless (no ui)".italic(), }; write!(f, "{str}") } @@ -103,15 +123,15 @@ impl FromStr for UiFramework { type Err = ScaffoldError; fn from_str(s: &str) -> ScaffoldResult { - match s { + match s.to_ascii_lowercase().as_str() { "vanilla" => Ok(UiFramework::Vanilla), "svelte" => Ok(UiFramework::Svelte), "vue" => Ok(UiFramework::Vue), "lit" => Ok(UiFramework::Lit), - _ => Err(ScaffoldError::InvalidUiFramework( - s.to_string(), - "vanilla, lit, svelte, vue".to_string(), - )), + "headless" => Ok(UiFramework::Headless), + value => Err(ScaffoldError::MalformedTemplate(format!( + "Invalid value: {value}, expected vanilla, svelte, vue, lit or headless" + ))), } } } diff --git a/templates/custom-template/custom-template/template/web-app/package.json.hbs b/templates/custom-template/custom-template/template/web-app/package.json.hbs index e5d051190..7a705b4f8 100644 --- a/templates/custom-template/custom-template/template/web-app/package.json.hbs +++ b/templates/custom-template/custom-template/template/web-app/package.json.hbs @@ -7,11 +7,11 @@ ], "scripts": { "start": "AGENTS=2 BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network", - "network": "hc s clean && npm run build:happ && UI_PORT=8888 concurrently \"npm start -w ui\" \"npm run 
launch:happ\" \"holochain-playground\"", + "network": "hc sandbox clean && npm run build:happ && UI_PORT=8888 concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", "test": "npm run build:zomes && hc app pack workdir --recursive && npm t -w tests", "launch:happ": "hc-spin -n $AGENTS --ui-port $UI_PORT workdir/{{app_name}}.happ", "start:tauri": "AGENTS=2 BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network:tauri", - "network:tauri": "hc s clean && npm run build:happ && UI_PORT=8888 concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", + "network:tauri": "hc sandbox clean && npm run build:happ && UI_PORT=8888 concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", "launch:tauri": "concurrently \"hc run-local-services --bootstrap-port $BOOTSTRAP_PORT --signal-port $SIGNAL_PORT\" \"echo pass | RUST_LOG=warn hc launch --piped -n $AGENTS workdir/{{app_name}}.happ --ui-port $UI_PORT network --bootstrap http://127.0.0.1:\"$BOOTSTRAP_PORT\" webrtc ws://127.0.0.1:\"$SIGNAL_PORT\"\"", {{#if holo_enabled}} "start:holo": "AGENTS=2 npm run network:holo", diff --git a/templates/headless/collection/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{kebab_case collection_name}}.test.ts.hbs b/templates/headless/collection/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{kebab_case collection_name}}.test.ts.hbs new file mode 100644 index 000000000..3aa9d2511 --- /dev/null +++ b/templates/headless/collection/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{kebab_case collection_name}}.test.ts.hbs @@ -0,0 +1,78 @@ +import { assert, test } from "vitest"; + +import { runScenario, dhtSync, CallableCell } from '@holochain/tryorama'; +import { + NewEntryAction, + ActionHash, + Record, + Link, + AppBundleSource, + fakeActionHash, + fakeAgentPubKey, + fakeEntryHash +} from '@holochain/client'; +import { decode } from '@msgpack/msgpack'; + +import { 
create{{pascal_case referenceable.name}} } from './common.js'; + +test('create a {{pascal_case referenceable.name}} and get {{lower_case collection_name}}', async () => { + await runScenario(async scenario => { + // Construct proper paths for your app. + // This assumes app bundle created by the `hc app pack` command. + const testAppPath = process.cwd() + '/../workdir/{{app_name}}.happ'; + + // Set up the app to be installed + const appSource = { appBundleSource: { path: testAppPath } }; + + // Add 2 players with the test app to the Scenario. The returned players + // can be destructured. + const [alice, bob] = await scenario.addPlayersWithApps([appSource, appSource]); + + // Shortcut peer discovery through gossip and register all agents in every + // conductor of the scenario. + await scenario.shareAllAgents(); + + // Bob gets {{lower_case collection_name}} + let collectionOutput: Link[] = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{snake_case collection_name}}", + payload: {{#if (eq collection_type.type "Global")}}null{{else}}alice.agentPubKey{{/if}} + }); + assert.equal(collectionOutput.length, 0); + + // Alice creates a {{pascal_case referenceable.name}} + const createRecord: Record = await create{{pascal_case referenceable.name}}(alice.cells[0]); + assert.ok(createRecord); + + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets {{lower_case collection_name}} again + collectionOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{snake_case collection_name}}", + payload: {{#if (eq collection_type.type "Global")}}null{{else}}alice.agentPubKey{{/if}} + }); + assert.equal(collectionOutput.length, 1); + assert.deepEqual({{#if (eq referenceable.hash_type "EntryHash")}}(createRecord.signed_action.hashed.content as NewEntryAction).entry_hash{{else}}createRecord.signed_action.hashed.hash{{/if}}, collectionOutput[0].target); +{{#if (and 
deletable (eq referenceable.hash_type "ActionHash"))}} + + // Alice deletes the {{pascal_case referenceable.name}} + await alice.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "delete_{{snake_case referenceable.name}}", + payload: createRecord.signed_action.hashed.hash + }); + + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets {{lower_case collection_name}} again + collectionOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{snake_case collection_name}}", + payload: {{#if (eq collection_type.type "Global")}}null{{else}}alice.agentPubKey{{/if}} + }); + assert.equal(collectionOutput.length, 0); +{{/if}} + }); +}); + diff --git a/templates/headless/coordinator-zome/tests/src/{{dna_role_name}}/{{zome_manifest.name}}/common.ts.hbs b/templates/headless/coordinator-zome/tests/src/{{dna_role_name}}/{{zome_manifest.name}}/common.ts.hbs new file mode 100644 index 000000000..e1b810614 --- /dev/null +++ b/templates/headless/coordinator-zome/tests/src/{{dna_role_name}}/{{zome_manifest.name}}/common.ts.hbs @@ -0,0 +1,3 @@ +import { CallableCell } from '@holochain/tryorama'; +import { NewEntryAction, ActionHash, Record, AppBundleSource, fakeActionHash, fakeAgentPubKey, fakeEntryHash, fakeDnaHash } from '@holochain/client'; + diff --git a/templates/headless/entry-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/common.ts.hbs b/templates/headless/entry-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/common.ts.hbs new file mode 100644 index 000000000..6009ec1a2 --- /dev/null +++ b/templates/headless/entry-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/common.ts.hbs @@ -0,0 +1,53 @@ +{{previous_file_content}} + +export async function sample{{pascal_case entry_type.name}}(cell: CallableCell, partial{{pascal_case entry_type.name}} = {}) { + return { + ...{ +{{#each entry_type.fields}} + {{#if linked_from}} + {{#if 
(ne linked_from.hash_type "AgentPubKey")}} + {{#if (eq cardinality "vector")}} + {{#if (eq (pascal_case linked_from.name) (pascal_case ../entry_type.name))}} + {{field_name}}: [], + {{else}} + {{#if (eq linked_from.hash_type "ActionHash")}} + {{field_name}}: [(await create{{pascal_case linked_from.name}}(cell)).signed_action.hashed.hash], + {{else}} + {{field_name}}: [((await create{{pascal_case linked_from.name}}(cell)).signed_action.hashed.content as NewEntryAction).entry_hash], + {{/if}} + {{/if}} + {{else}} + {{#if (eq (pascal_case linked_from.name) (pascal_case ../entry_type.name))}} + {{field_name}}: null, + {{else}} + {{#if (eq linked_from.hash_type "ActionHash")}} + {{field_name}}: (await create{{pascal_case linked_from.name}}(cell)).signed_action.hashed.hash, + {{else}} + {{field_name}}: ((await create{{pascal_case linked_from.name}}(cell)).signed_action.hashed.content as NewEntryAction).entry_hash, + {{/if}} + {{/if}} + {{/if}} + {{else}} + {{field_name}}: cell.cell_id[1], + {{/if}} + {{else}} + {{#if (eq cardinality "vector")}} + {{field_name}}: [{{> (concat field_type.type "/sample") field_type=field_type}}], + {{else}} + {{field_name}}: {{> (concat field_type.type "/sample") field_type=field_type}}, + {{/if}} + {{/if}} +{{/each}} + }, + ...partial{{pascal_case entry_type.name}} + }; +} + +export async function create{{pascal_case entry_type.name}}(cell: CallableCell, {{camel_case entry_type.name}} = undefined): Promise { + return cell.callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "create_{{snake_case entry_type.name}}", + payload: {{camel_case entry_type.name}} || await sample{{pascal_case entry_type.name}}(cell), + }); +} + diff --git a/templates/headless/entry-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{kebab_case entry_type.name}}.test.ts.hbs b/templates/headless/entry-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{kebab_case entry_type.name}}.test.ts.hbs new file mode 
100644 index 000000000..8d5608c64 --- /dev/null +++ b/templates/headless/entry-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{kebab_case entry_type.name}}.test.ts.hbs @@ -0,0 +1,279 @@ +import { assert, test } from "vitest"; + +import { runScenario, dhtSync, CallableCell } from '@holochain/tryorama'; +import { + NewEntryAction, + ActionHash, + Record, + Link, + CreateLink, + DeleteLink, + SignedActionHashed, + AppBundleSource, + fakeActionHash, + fakeAgentPubKey, + fakeEntryHash +} from '@holochain/client'; +import { decode } from '@msgpack/msgpack'; + +import { create{{pascal_case entry_type.name}}, sample{{pascal_case entry_type.name}} } from './common.js'; + +test('create {{pascal_case entry_type.name}}', async () => { + await runScenario(async scenario => { + // Construct proper paths for your app. + // This assumes app bundle created by the `hc app pack` command. + const testAppPath = process.cwd() + '/../workdir/{{app_name}}.happ'; + + // Set up the app to be installed + const appSource = { appBundleSource: { path: testAppPath } }; + + // Add 2 players with the test app to the Scenario. The returned players + // can be destructured. + const [alice, bob] = await scenario.addPlayersWithApps([appSource, appSource]); + + // Shortcut peer discovery through gossip and register all agents in every + // conductor of the scenario. + await scenario.shareAllAgents(); + + // Alice creates a {{pascal_case entry_type.name}} + const record: Record = await create{{pascal_case entry_type.name}}(alice.cells[0]); + assert.ok(record); + }); +}); + +test('create and read {{pascal_case entry_type.name}}', async () => { + await runScenario(async scenario => { + // Construct proper paths for your app. + // This assumes app bundle created by the `hc app pack` command. 
+ const testAppPath = process.cwd() + '/../workdir/{{app_name}}.happ'; + + // Set up the app to be installed + const appSource = { appBundleSource: { path: testAppPath } }; + + // Add 2 players with the test app to the Scenario. The returned players + // can be destructured. + const [alice, bob] = await scenario.addPlayersWithApps([appSource, appSource]); + + // Shortcut peer discovery through gossip and register all agents in every + // conductor of the scenario. + await scenario.shareAllAgents(); + + const sample = await sample{{pascal_case entry_type.name}}(alice.cells[0]); + + // Alice creates a {{pascal_case entry_type.name}} + const record: Record = await create{{pascal_case entry_type.name}}(alice.cells[0], sample); + assert.ok(record); + + // Wait for the created entry to be propagated to the other node. + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets the created {{pascal_case entry_type.name}} + const createReadOutput: Record = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "{{#if crud.update}}get_original_{{snake_case entry_type.name}}{{else}}get_{{snake_case entry_type.name}}{{/if}}", + payload: {{#if entry_type.reference_entry_hash}}(record.signed_action.hashed.content as NewEntryAction).entry_hash{{else}}record.signed_action.hashed.hash{{/if}}, + }); + assert.deepEqual(sample, decode((createReadOutput.entry as any).Present.entry) as any); + + {{#each entry_type.fields}} + {{#if linked_from}} + {{#if (ne (pascal_case linked_from.name) (pascal_case ../entry_type.name))}} + // Bob gets the {{pascal_case (plural linked_from.name)}} for the new {{pascal_case ../entry_type.name}} + let linksTo{{pascal_case (plural linked_from.name)}}: Link[] = await bob.cells[0].callZome({ + zome_name: "{{../coordinator_zome_manifest.name}}", + fn_name: "get_{{snake_case (plural ../entry_type.name)}}_for_{{snake_case linked_from.name}}", + payload: {{#if (eq cardinality 
"vector")}}sample.{{field_name}}[0]{{else}}sample.{{field_name}}{{/if}} + }); + assert.equal(linksTo{{pascal_case (plural linked_from.name)}}.length, 1); + assert.deepEqual(linksTo{{pascal_case (plural linked_from.name)}}[0].target, {{#if ../entry_type.reference_entry_hash}}(record.signed_action.hashed.content as NewEntryAction).entry_hash{{else}}record.signed_action.hashed.hash{{/if}}); + {{/if}} + {{/if}} + {{/each}} + }); +}); + +{{#if crud.update}} +test('create and update {{pascal_case entry_type.name}}', async () => { + await runScenario(async scenario => { + // Construct proper paths for your app. + // This assumes app bundle created by the `hc app pack` command. + const testAppPath = process.cwd() + '/../workdir/{{app_name}}.happ'; + + // Set up the app to be installed + const appSource = { appBundleSource: { path: testAppPath } }; + + // Add 2 players with the test app to the Scenario. The returned players + // can be destructured. + const [alice, bob] = await scenario.addPlayersWithApps([appSource, appSource]); + + // Shortcut peer discovery through gossip and register all agents in every + // conductor of the scenario. 
+ await scenario.shareAllAgents(); + + // Alice creates a {{pascal_case entry_type.name}} + const record: Record = await create{{pascal_case entry_type.name}}(alice.cells[0]); + assert.ok(record); + + const originalActionHash = record.signed_action.hashed.hash; + + // Alice updates the {{pascal_case entry_type.name}} + let contentUpdate: any = await sample{{pascal_case entry_type.name}}(alice.cells[0]); + let updateInput = { +{{#if link_from_original_to_each_update}} + original_{{snake_case entry_type.name}}_hash: originalActionHash, +{{/if}} + previous_{{snake_case entry_type.name}}_hash: originalActionHash, + updated_{{snake_case entry_type.name}}: contentUpdate, + }; + + let updatedRecord: Record = await alice.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "update_{{snake_case entry_type.name}}", + payload: updateInput, + }); + assert.ok(updatedRecord); + + // Wait for the updated entry to be propagated to the other node. + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets the updated {{pascal_case entry_type.name}} + const readUpdatedOutput0: Record = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_latest_{{snake_case entry_type.name}}", + payload: updatedRecord.signed_action.hashed.hash, + }); + assert.deepEqual(contentUpdate, decode((readUpdatedOutput0.entry as any).Present.entry) as any); + + // Alice updates the {{pascal_case entry_type.name}} again + contentUpdate = await sample{{pascal_case entry_type.name}}(alice.cells[0]); + updateInput = { +{{#if link_from_original_to_each_update}} + original_{{snake_case entry_type.name}}_hash: originalActionHash, +{{/if}} + previous_{{snake_case entry_type.name}}_hash: updatedRecord.signed_action.hashed.hash, + updated_{{snake_case entry_type.name}}: contentUpdate, + }; + + updatedRecord = await alice.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "update_{{snake_case 
entry_type.name}}", + payload: updateInput, + }); + assert.ok(updatedRecord); + + // Wait for the updated entry to be propagated to the other node. + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets the updated {{pascal_case entry_type.name}} + const readUpdatedOutput1: Record = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_latest_{{snake_case entry_type.name}}", + payload: updatedRecord.signed_action.hashed.hash, + }); + assert.deepEqual(contentUpdate, decode((readUpdatedOutput1.entry as any).Present.entry) as any); + + // Bob gets all the revisions for {{pascal_case entry_type.name}} + const revisions: Record[] = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_all_revisions_for_{{snake_case entry_type.name}}", + payload: originalActionHash, + }); + assert.equal(revisions.length, 3); + assert.deepEqual(contentUpdate, decode((revisions[2].entry as any).Present.entry) as any); + }); +}); +{{/if}} + +{{#if crud.delete}} +test('create and delete {{pascal_case entry_type.name}}', async () => { + await runScenario(async scenario => { + // Construct proper paths for your app. + // This assumes app bundle created by the `hc app pack` command. + const testAppPath = process.cwd() + '/../workdir/{{app_name}}.happ'; + + // Set up the app to be installed + const appSource = { appBundleSource: { path: testAppPath } }; + + // Add 2 players with the test app to the Scenario. The returned players + // can be destructured. + const [alice, bob] = await scenario.addPlayersWithApps([appSource, appSource]); + + // Shortcut peer discovery through gossip and register all agents in every + // conductor of the scenario. 
+ await scenario.shareAllAgents(); + + const sample = await sample{{pascal_case entry_type.name}}(alice.cells[0]); + + // Alice creates a {{pascal_case entry_type.name}} + const record: Record = await create{{pascal_case entry_type.name}}(alice.cells[0], sample); + assert.ok(record); + + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + {{#each entry_type.fields}} + {{#if linked_from}} + {{#if (ne (pascal_case linked_from.name) (pascal_case ../entry_type.name))}} + // Bob gets the {{pascal_case (plural linked_from.name)}} for the new {{pascal_case ../entry_type.name}} + let linksTo{{pascal_case (plural linked_from.name)}}: Link[] = await bob.cells[0].callZome({ + zome_name: "{{../coordinator_zome_manifest.name}}", + fn_name: "get_{{snake_case (plural ../entry_type.name)}}_for_{{snake_case linked_from.name}}", + payload: {{#if (eq cardinality "vector")}}sample.{{field_name}}[0]{{else}}sample.{{field_name}}{{/if}} + }); + assert.equal(linksTo{{pascal_case (plural linked_from.name)}}.length, 1); + assert.deepEqual(linksTo{{pascal_case (plural linked_from.name)}}[0].target, {{#if ../entry_type.reference_entry_hash}}(record.signed_action.hashed.content as NewEntryAction).entry_hash{{else}}record.signed_action.hashed.hash{{/if}}); + {{/if}} + {{/if}} + {{/each}} + + // Alice deletes the {{pascal_case entry_type.name}} + const deleteActionHash = await alice.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "delete_{{snake_case entry_type.name}}", + payload: record.signed_action.hashed.hash, + }); + assert.ok(deleteActionHash); + + // Wait for the entry deletion to be propagated to the other node. 
+ await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets the oldest delete for the {{pascal_case entry_type.name}} + const oldestDeleteFor{{pascal_case entry_type.name}}: SignedActionHashed = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_oldest_delete_for_{{snake_case entry_type.name}}", + payload: record.signed_action.hashed.hash, + }); + assert.ok(oldestDeleteFor{{pascal_case entry_type.name}}); + + // Bob gets the deletions for the {{pascal_case entry_type.name}} + const deletesFor{{pascal_case entry_type.name}}: SignedActionHashed[] = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_all_deletes_for_{{snake_case entry_type.name}}", + payload: record.signed_action.hashed.hash, + }); + assert.equal(deletesFor{{pascal_case entry_type.name}}.length, 1); + + {{#each entry_type.fields}} + {{#if linked_from}} + {{#if (ne (pascal_case linked_from.name) (pascal_case ../entry_type.name))}} + // Bob gets the {{pascal_case (plural linked_from.name)}} for the {{pascal_case ../entry_type.name}} again + linksTo{{pascal_case (plural linked_from.name)}} = await bob.cells[0].callZome({ + zome_name: "{{../coordinator_zome_manifest.name}}", + fn_name: "get_{{snake_case (plural ../entry_type.name)}}_for_{{snake_case linked_from.name}}", + payload: {{#if (eq cardinality "vector")}}sample.{{field_name}}[0]{{else}}sample.{{field_name}}{{/if}} + }); + assert.equal(linksTo{{pascal_case (plural linked_from.name)}}.length, 0); + + // Bob gets the deleted {{pascal_case (plural linked_from.name)}} for the {{pascal_case ../entry_type.name}} + const deletedLinksTo{{pascal_case (plural linked_from.name)}}: Array<[SignedActionHashed, SignedActionHashed[]]> = await bob.cells[0].callZome({ + zome_name: "{{../coordinator_zome_manifest.name}}", + fn_name: "get_deleted_{{snake_case (plural ../entry_type.name)}}_for_{{snake_case linked_from.name}}", + payload: {{#if (eq cardinality 
"vector")}}sample.{{field_name}}[0]{{else}}sample.{{field_name}}{{/if}} + }); + assert.equal(deletedLinksTo{{pascal_case (plural linked_from.name)}}.length, 1); + {{/if}} + {{/if}} + {{/each}} + + }); +}); +{{/if}} diff --git "a/templates/headless/link-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if to_referenceable}}{{kebab_case from_referenceable.name}}-to-{{kebab_case (plural to_referenceable.name)}}.test.ts{{\302\241if}}.hbs" "b/templates/headless/link-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if to_referenceable}}{{kebab_case from_referenceable.name}}-to-{{kebab_case (plural to_referenceable.name)}}.test.ts{{\302\241if}}.hbs" new file mode 100644 index 000000000..4d8314202 --- /dev/null +++ "b/templates/headless/link-type/tests/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if to_referenceable}}{{kebab_case from_referenceable.name}}-to-{{kebab_case (plural to_referenceable.name)}}.test.ts{{\302\241if}}.hbs" @@ -0,0 +1,158 @@ +import { assert, test } from "vitest"; + +import { runScenario, dhtSync, CallableCell } from '@holochain/tryorama'; +import { + NewEntryAction, + ActionHash, + Record, + Link, + CreateLink, + DeleteLink, + SignedActionHashed, + AppBundleSource, + fakeActionHash, + fakeAgentPubKey, + fakeEntryHash +} from '@holochain/client'; +import { decode } from '@msgpack/msgpack'; + +{{#if (ne from_referenceable.hash_type "AgentPubKey")}} +import { create{{pascal_case from_referenceable.name}} } from './common.js'; +{{/if}} +{{#if (ne to_referenceable.hash_type "AgentPubKey")}} +import { create{{pascal_case to_referenceable.name}} } from './common.js'; +{{/if}} + +test('link a {{pascal_case from_referenceable.name}} to a {{pascal_case to_referenceable.name}}', async () => { + await runScenario(async scenario => { + // Construct proper paths for your app. + // This assumes app bundle created by the `hc app pack` command. 
+ const testAppPath = process.cwd() + '/../workdir/{{app_name}}.happ'; + + // Set up the app to be installed + const appSource = { appBundleSource: { path: testAppPath } }; + + // Add 2 players with the test app to the Scenario. The returned players + // can be destructured. + const [alice, bob] = await scenario.addPlayersWithApps([appSource, appSource]); + + // Shortcut peer discovery through gossip and register all agents in every + // conductor of the scenario. + await scenario.shareAllAgents(); + +{{#if (eq from_referenceable.hash_type "AgentPubKey")}} + const baseAddress = alice.agentPubKey; +{{else}} + const baseRecord = await create{{pascal_case from_referenceable.name}}(alice.cells[0]); + {{#if (eq from_referenceable.hash_type "EntryHash")}} + const baseAddress = (baseRecord.signed_action.hashed.content as NewEntryAction).entry_hash; + {{else}} + const baseAddress = baseRecord.signed_action.hashed.hash; + {{/if}} +{{/if}} +{{#if (eq to_referenceable.hash_type "AgentPubKey")}} + const targetAddress = alice.agentPubKey; +{{else}} + const targetRecord = await create{{pascal_case to_referenceable.name}}(alice.cells[0]); + {{#if (eq to_referenceable.hash_type "EntryHash")}} + const targetAddress = (targetRecord.signed_action.hashed.content as NewEntryAction).entry_hash; + {{else}} + const targetAddress = targetRecord.signed_action.hashed.hash; + {{/if}} +{{/if}} + + // Bob gets the links, should be empty + let linksOutput: Link[] = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{plural (snake_case to_referenceable.name)}}_for_{{snake_case from_referenceable.name}}", + payload: baseAddress + }); + assert.equal(linksOutput.length, 0); + + // Alice creates a link from {{pascal_case from_referenceable.name}} to {{pascal_case to_referenceable.name}} + await alice.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "add_{{snake_case to_referenceable.name}}_for_{{snake_case 
from_referenceable.name}}", + payload: { + base_{{snake_case from_referenceable.singular_arg}}: baseAddress, + target_{{snake_case to_referenceable.singular_arg}}: targetAddress + } + }); + + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets the links again + linksOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{plural (snake_case to_referenceable.name)}}_for_{{snake_case from_referenceable.name}}", + payload: baseAddress + }); + assert.equal(linksOutput.length, 1); +{{#if (ne to_referenceable.hash_type "AgentPubKey")}} + assert.deepEqual(targetAddress, linksOutput[0].target); +{{/if}} + +{{#if bidirectional}} + + // Bob gets the links in the inverse direction + linksOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{plural (snake_case from_referenceable.name)}}_for_{{snake_case to_referenceable.name}}", + payload: targetAddress + }); + assert.equal(linksOutput.length, 1); + {{#if (ne from_referenceable.hash_type "AgentPubKey")}} + assert.deepEqual(baseAddress, linksOutput[0].target); + {{/if}} +{{/if}} + +{{#if delete}} + await alice.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "remove_{{snake_case to_referenceable.name}}_for_{{snake_case from_referenceable.name}}", + payload: { + base_{{snake_case from_referenceable.singular_arg}}: baseAddress, + target_{{snake_case to_referenceable.singular_arg}}: targetAddress + } + }); + + await dhtSync([alice, bob], alice.cells[0].cell_id[0]); + + // Bob gets the links again + linksOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{plural (snake_case to_referenceable.name)}}_for_{{snake_case from_referenceable.name}}", + payload: baseAddress + }); + assert.equal(linksOutput.length, 0); + + // Bob gets the deleted links + let deletedLinksOutput: Array<[SignedActionHashed, SignedActionHashed[]]> = 
await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_deleted_{{plural (snake_case to_referenceable.name)}}_for_{{snake_case from_referenceable.name}}", + payload: baseAddress + }); + assert.equal(deletedLinksOutput.length, 1); + + {{#if bidirectional}} + // Bob gets the links in the inverse direction + linksOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_{{plural (snake_case from_referenceable.name)}}_for_{{snake_case to_referenceable.name}}", + payload: targetAddress + }); + assert.equal(linksOutput.length, 0); + + // Bob gets the deleted links in the inverse direction + deletedLinksOutput = await bob.cells[0].callZome({ + zome_name: "{{coordinator_zome_manifest.name}}", + fn_name: "get_deleted_{{plural (snake_case from_referenceable.name)}}_for_{{snake_case to_referenceable.name}}", + payload: targetAddress + }); + assert.equal(deletedLinksOutput.length, 1); + {{/if}} + +{{/if}} + }); +}); + diff --git a/templates/headless/web-app.instructions.hbs b/templates/headless/web-app.instructions.hbs new file mode 100644 index 000000000..ef6f1bc8b --- /dev/null +++ b/templates/headless/web-app.instructions.hbs @@ -0,0 +1,18 @@ +This is a headless skeleton for a hApp. + +To build the app you can run + + npm run build:happ + +The `ui` directory is empty; you are expected to add your frontend there if you still intend to include one. + +Note that the scaffolding cli does not generate UI components for this template. + +- For setting up a frontend, you can use a build tool such as vite: + + cd {{app_name}} + npm create vite@latest ./ui + +- Ensure your UI's local dev server port is set using the UI_PORT environment variable from the root package.json. +- Update the "network"{{#if holo_enabled}} and "network:holo"{{/if}} script in the root package.json to include your ui's start script. 
+- Add a "package" script in your package.json that builds the ui and creates a zip archive of all files in the ui directory. \ No newline at end of file diff --git a/templates/headless/web-app/.github/workflows/test.yaml.hbs b/templates/headless/web-app/.github/workflows/test.yaml.hbs new file mode 100644 index 000000000..3c256fa8f --- /dev/null +++ b/templates/headless/web-app/.github/workflows/test.yaml.hbs @@ -0,0 +1,30 @@ +name: "test" +on: + # Trigger the workflow on push or pull request, + # but only for the main branch + push: + branches: [ main, develop ] + pull_request: + branches: [ main, develop ] + +jobs: + testbuild: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install nix + uses: cachix/install-nix-action@v25 + with: + install_url: https://releases.nixos.org/nix/nix-2.20.0/install + extra_nix_config: | + experimental-features = flakes nix-command + + - uses: cachix/cachix-action@v14 + with: + name: holochain-ci + + - name: Install and test + run: | + nix develop --command bash -c "npm i && npm t" + diff --git a/templates/headless/web-app/README.md.hbs b/templates/headless/web-app/README.md.hbs new file mode 100644 index 000000000..5274b8049 --- /dev/null +++ b/templates/headless/web-app/README.md.hbs @@ -0,0 +1,59 @@ +# {{title_case app_name}} + +## Environment Setup + +> PREREQUISITE: set up the [holochain development environment](https://developer.holochain.org/docs/install/). + +Enter the nix shell by running this in the root folder of the repository: + +```bash +nix develop +npm install +``` + +**Run all the other instructions in this README from inside this nix shell, otherwise they won't work**. + +## Running 2 agents + +```bash +npm start +``` + +This will create a network of 2 nodes connected to each other and their respective UIs. +It will also bring up the Holochain Playground for advanced introspection of the conductors. 
+ +## Running the backend tests + +```bash +npm test +``` + +## Bootstrapping a network + +Create a custom network of nodes connected to each other and their respective UIs with: + +```bash +AGENTS=3 npm run network +``` + +Substitute the "3" for the number of nodes that you want to bootstrap in your network. +This will also bring up the Holochain Playground for advanced introspection of the conductors. + +## Packaging + +To package the web happ: +```bash +npm run package +``` + +You'll have the `{{app_name}}.webhapp` in `workdir`. This is what you should distribute so that the Holochain Launcher can install it. +You will also have its subcomponent `{{app_name}}.happ` in the same folder. + +## Documentation + +This repository is using these tools: +- [NPM Workspaces](https://docs.npmjs.com/cli/v7/using-npm/workspaces/): npm v7's built-in monorepo capabilities. +- [hc](https://github.com/holochain/holochain/tree/develop/crates/hc): Holochain CLI to easily manage Holochain development instances. +- [@holochain/tryorama](https://www.npmjs.com/package/@holochain/tryorama): test framework. +- [@holochain/client](https://www.npmjs.com/package/@holochain/client): client library to connect to Holochain from the UI. +- [@holochain-playground/cli](https://www.npmjs.com/package/@holochain-playground/cli): introspection tooling to understand what's going on in the Holochain nodes. 
diff --git a/templates/headless/web-app/package.json.hbs b/templates/headless/web-app/package.json.hbs new file mode 100644 index 000000000..150b60512 --- /dev/null +++ b/templates/headless/web-app/package.json.hbs @@ -0,0 +1,37 @@ +{ + "name": "{{app_name}}-dev", + "private": true, + "workspaces": [ + "ui", + "tests" + ], + "scripts": { + "start": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network", + "network": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm run launch:happ\" \"holochain-playground\"", + "test": "npm run build:zomes && hc app pack workdir --recursive && npm t -w tests", + "launch:happ": "hc-spin -n $AGENTS --ui-port $UI_PORT workdir/{{app_name}}.happ", + "start:tauri": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network:tauri", + "network:tauri": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm run launch:tauri\" \"holochain-playground\"", + "launch:tauri": "concurrently \"hc run-local-services --bootstrap-port $BOOTSTRAP_PORT --signal-port $SIGNAL_PORT\" \"echo pass | RUST_LOG=warn hc launch --piped -n $AGENTS workdir/{{app_name}}.happ --ui-port $UI_PORT network --bootstrap http://127.0.0.1:\"$BOOTSTRAP_PORT\" webrtc ws://127.0.0.1:\"$SIGNAL_PORT\"\"", + {{#if holo_enabled}} + "start:holo": "AGENTS=${AGENTS:-2} npm run network:holo", + "network:holo": "npm run build:happ && UI_PORT=$(port) concurrently \"npm run launch:holo-dev-server\" \"holochain-playground ws://localhost:4444\"", + "launch:holo-dev-server": "holo-dev-server workdir/{{app_name}}.happ", + {{/if}} + "build:happ": "npm run build:zomes && hc app pack workdir --recursive", + "build:zomes": "RUSTFLAGS='' CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown" + }, + "devDependencies": { + "@holochain-playground/cli": "^0.1.1", + "@holochain/hc-spin": "{{hc_spin_version}}", + "concurrently": "^6.2.1", + "rimraf": "^3.0.2", + {{#if holo_enabled}} + 
"concurrently-repeat": "^0.0.1", + {{/if}} + "new-port-cli": "^1.0.0" + }, + "engines": { + "npm": ">=7.0.0" + } +} diff --git a/templates/headless/web-app/tests/package.json.hbs b/templates/headless/web-app/tests/package.json.hbs new file mode 100644 index 000000000..8821314bc --- /dev/null +++ b/templates/headless/web-app/tests/package.json.hbs @@ -0,0 +1,15 @@ +{ + "name": "tests", + "private": true, + "scripts": { + "test": "vitest run" + }, + "dependencies": { + "@msgpack/msgpack": "^2.8.0", + "@holochain/client": "{{holochain_client_version}}", + "@holochain/tryorama": "{{tryorama_version}}", + "typescript": "^4.9.4", + "vitest": "^0.28.4" + }, + "type": "module" +} diff --git a/templates/headless/web-app/tests/tsconfig.json.hbs b/templates/headless/web-app/tests/tsconfig.json.hbs new file mode 100644 index 000000000..88643849d --- /dev/null +++ b/templates/headless/web-app/tests/tsconfig.json.hbs @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "target": "ES2017", + "module": "ESNext", + "moduleResolution": "node", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true + } +} diff --git a/templates/headless/web-app/tests/vitest.config.ts.hbs b/templates/headless/web-app/tests/vitest.config.ts.hbs new file mode 100644 index 000000000..7737dbd2a --- /dev/null +++ b/templates/headless/web-app/tests/vitest.config.ts.hbs @@ -0,0 +1,9 @@ +import { defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + threads: false, + testTimeout: 60*1000*3 // 3 mins + }, +}) + diff --git a/templates/headless/web-app/workdir/web-happ.yaml.hbs b/templates/headless/web-app/workdir/web-happ.yaml.hbs new file mode 100644 index 000000000..939546954 --- /dev/null +++ b/templates/headless/web-app/workdir/web-happ.yaml.hbs @@ -0,0 +1,7 @@ +--- +manifest_version: "1" +name: {{app_name}} +ui: + bundled: ~ +happ_manifest: + bundled: "./{{app_name}}.happ" diff --git a/templates/lit/web-app/package.json.hbs b/templates/lit/web-app/package.json.hbs index 
8cf0e076c..fdeb2f8f4 100644 --- a/templates/lit/web-app/package.json.hbs +++ b/templates/lit/web-app/package.json.hbs @@ -7,11 +7,11 @@ ], "scripts": { "start": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network", - "network": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", + "network": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", "test": "npm run build:zomes && hc app pack workdir --recursive && npm t -w tests", "launch:happ": "hc-spin -n $AGENTS --ui-port $UI_PORT workdir/{{app_name}}.happ", "start:tauri": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network:tauri", - "network:tauri": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", + "network:tauri": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", "launch:tauri": "concurrently \"hc run-local-services --bootstrap-port $BOOTSTRAP_PORT --signal-port $SIGNAL_PORT\" \"echo pass | RUST_LOG=warn hc launch --piped -n $AGENTS workdir/{{app_name}}.happ --ui-port $UI_PORT network --bootstrap http://127.0.0.1:\"$BOOTSTRAP_PORT\" webrtc ws://127.0.0.1:\"$SIGNAL_PORT\"\"", {{#if holo_enabled}} "start:holo": "AGENTS=${AGENTS:-2} npm run network:holo", diff --git a/templates/svelte/web-app/package.json.hbs b/templates/svelte/web-app/package.json.hbs index 8cf0e076c..1c68cdfa1 100644 --- a/templates/svelte/web-app/package.json.hbs +++ b/templates/svelte/web-app/package.json.hbs @@ -7,18 +7,18 @@ ], "scripts": { "start": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network", - "network": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run 
launch:happ\" \"holochain-playground\"", + "network": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", "test": "npm run build:zomes && hc app pack workdir --recursive && npm t -w tests", "launch:happ": "hc-spin -n $AGENTS --ui-port $UI_PORT workdir/{{app_name}}.happ", "start:tauri": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network:tauri", - "network:tauri": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", + "network:tauri": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", "launch:tauri": "concurrently \"hc run-local-services --bootstrap-port $BOOTSTRAP_PORT --signal-port $SIGNAL_PORT\" \"echo pass | RUST_LOG=warn hc launch --piped -n $AGENTS workdir/{{app_name}}.happ --ui-port $UI_PORT network --bootstrap http://127.0.0.1:\"$BOOTSTRAP_PORT\" webrtc ws://127.0.0.1:\"$SIGNAL_PORT\"\"", {{#if holo_enabled}} "start:holo": "AGENTS=${AGENTS:-2} npm run network:holo", "network:holo": "npm run build:happ && UI_PORT=$(port) concurrently \"npm run launch:holo-dev-server\" \"holochain-playground ws://localhost:4444\" \"concurrently-repeat 'VITE_APP_CHAPERONE_URL=http://localhost:24274 VITE_APP_IS_HOLO=true npm start -w ui' $AGENTS\"", "launch:holo-dev-server": "holo-dev-server workdir/{{app_name}}.happ", {{/if}} - "package": "npm run build:happ && npm run package -w ui && hc web-app pack workdir --recursive", + "package": "npm run build:happ && hc web-app pack workdir --recursive", "build:happ": "npm run build:zomes && hc app pack workdir --recursive", "build:zomes": "RUSTFLAGS='' CARGO_TARGET_DIR=target cargo build --release --target wasm32-unknown-unknown" }, diff --git a/templates/vanilla/web-app/package.json.hbs b/templates/vanilla/web-app/package.json.hbs index 
6c7a929ed..2e10c6d18 100644 --- a/templates/vanilla/web-app/package.json.hbs +++ b/templates/vanilla/web-app/package.json.hbs @@ -7,11 +7,11 @@ ], "scripts": { "start": "AGENTS=${AGENTS:-2} npm run network", - "network": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", + "network": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", "test": "npm run build:zomes && hc app pack workdir --recursive && npm t -w tests", "launch:happ": "hc-spin -n $AGENTS --ui-port $UI_PORT workdir/{{app_name}}.happ", "start:tauri": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network:tauri", - "network:tauri": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", + "network:tauri": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", "launch:tauri": "concurrently \"hc run-local-services --bootstrap-port $BOOTSTRAP_PORT --signal-port $SIGNAL_PORT\" \"echo pass | RUST_LOG=warn hc launch --piped -n $AGENTS workdir/{{app_name}}.happ --ui-port $UI_PORT network --bootstrap http://127.0.0.1:\"$BOOTSTRAP_PORT\" webrtc ws://127.0.0.1:\"$SIGNAL_PORT\"\"", "package": "npm run build:happ && npm run package -w ui && hc web-app pack workdir --recursive", "build:happ": "npm run build:zomes && hc app pack workdir --recursive", diff --git "a/templates/vue/entry-type/ui/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if crud.update}}Edit{{pascal_case entry_type.name}}.vue{{\302\241if}}.hbs" "b/templates/vue/entry-type/ui/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if crud.update}}Edit{{pascal_case entry_type.name}}.vue{{\302\241if}}.hbs" index ffefe6fda..6de15efcd 100644 --- 
"a/templates/vue/entry-type/ui/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if crud.update}}Edit{{pascal_case entry_type.name}}.vue{{\302\241if}}.hbs" +++ "b/templates/vue/entry-type/ui/src/{{dna_role_name}}/{{coordinator_zome_manifest.name}}/{{#if crud.update}}Edit{{pascal_case entry_type.name}}.vue{{\302\241if}}.hbs" @@ -69,9 +69,9 @@ export default defineComponent({ {{#each entry_type.fields}} {{#if widget}} {{#if (not (eq cardinality "vector" ) )}} - {{camel_case field_name}}: current{{pascal_case ../entry_type.name}}.{{camel_case field_name}}, + {{camel_case field_name}}: current{{pascal_case ../entry_type.name}}.{{snake_case field_name}}, {{else}} - {{camel_case field_name}}: current{{pascal_case ../entry_type.name}}.{{camel_case field_name}}, + {{camel_case field_name}}: current{{pascal_case ../entry_type.name}}.{{snake_case field_name}}, {{/if}} {{/if}} {{/each}} diff --git a/templates/vue/web-app/package.json.hbs b/templates/vue/web-app/package.json.hbs index 62d433c61..bb42c4d3a 100644 --- a/templates/vue/web-app/package.json.hbs +++ b/templates/vue/web-app/package.json.hbs @@ -7,11 +7,11 @@ ], "scripts": { "start": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network", - "network": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", + "network": "hc sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:happ\" \"holochain-playground\"", "test": "npm run build:zomes && hc app pack workdir --recursive && npm t -w tests", "launch:happ": "hc-spin -n $AGENTS --ui-port $UI_PORT workdir/{{app_name}}.happ", "start:tauri": "AGENTS=${AGENTS:-2} BOOTSTRAP_PORT=$(port) SIGNAL_PORT=$(port) npm run network:tauri", - "network:tauri": "hc s clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", + "network:tauri": "hc 
sandbox clean && npm run build:happ && UI_PORT=$(port) concurrently \"npm start -w ui\" \"npm run launch:tauri\" \"holochain-playground\"", "launch:tauri": "concurrently \"hc run-local-services --bootstrap-port $BOOTSTRAP_PORT --signal-port $SIGNAL_PORT\" \"echo pass | RUST_LOG=warn hc launch --piped -n $AGENTS workdir/{{app_name}}.happ --ui-port $UI_PORT network --bootstrap http://127.0.0.1:\"$BOOTSTRAP_PORT\" webrtc ws://127.0.0.1:\"$SIGNAL_PORT\"\"", {{#if holo_enabled}} "start:holo": "AGENTS=${AGENTS:-2} npm run network:holo",