diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 667cfaed..b5fa8be9 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -4,12 +4,7 @@ You are an expert in Cosmos blockchain, specializing in CometBFT, Cosmos SDK, Co ## Architecture Overview -This is a **CosmWasm smart contract ecosystem** for the AXONE protocol, comprising four specialized contracts in a layered architecture: - -- **`axone-objectarium`**: Immutable object storage (foundation layer) -- **`axone-cognitarium`**: RDF/semantic triple storage with SPARQL querying -- **`axone-law-stone`**: Prolog-based governance rules (depends on objectarium for persistence) -- **`axone-dataverse`**: Protocol orchestrator (depends on cognitarium + law-stone) +This is a **CosmWasm smart contract ecosystem** for the AXONE protocol, comprising four specialized contracts in a layered architecture. **Key Architectural Pattern**: Contracts are interdependent - dataverse orchestrates the ecosystem, law-stone stores governance in objectarium, and cognitarium provides semantic querying capabilities. 
diff --git a/.releaserc.cjs b/.releaserc.cjs index 60c8d5f6..c513b14c 100644 --- a/.releaserc.cjs +++ b/.releaserc.cjs @@ -65,8 +65,8 @@ module.exports = { { file: "Cargo.toml", hasChanged: true, - numMatches: 7, - numReplacements: 7, + numMatches: 1, + numReplacements: 1, }, ], }, @@ -85,19 +85,8 @@ module.exports = { { successComment: false, assets: [ - { path: "./artifacts/axone_objectarium.wasm" }, - { path: "./artifacts/axone_law_stone.wasm" }, - { path: "./artifacts/axone_cognitarium.wasm" }, - { path: "./artifacts/axone_dataverse.wasm" }, + { path: "./artifacts/axone_dummy.wasm" }, { path: "./artifacts/checksums.txt" }, - { - path: "./contracts/axone-objectarium/schema/axone-objectarium.json", - }, - { path: "./contracts/axone-law-stone/schema/axone-law-stone.json" }, - { - path: "./contracts/axone-cognitarium/schema/axone-cognitarium.json", - }, - { path: "./contracts/axone-dataverse/schema/axone-dataverse.json" }, ], }, ], diff --git a/.size-limit.json b/.size-limit.json index 091797b2..75cde0a9 100644 --- a/.size-limit.json +++ b/.size-limit.json @@ -1,24 +1,6 @@ [ { - "path": "target/wasm32-unknown-unknown/release/axone_objectarium.wasm", - "running": false, - "brotli": false, - "gzip": false - }, - { - "path": "target/wasm32-unknown-unknown/release/axone_law_stone.wasm", - "running": false, - "brotli": false, - "gzip": false - }, - { - "path": "target/wasm32-unknown-unknown/release/axone_cognitarium.wasm", - "running": false, - "brotli": false, - "gzip": false - }, - { - "path": "target/wasm32-unknown-unknown/release/axone_dataverse.wasm", + "path": "target/wasm32-unknown-unknown/release/axone_dummy.wasm", "running": false, "brotli": false, "gzip": false diff --git a/Cargo.lock b/Cargo.lock index 2f789ce9..83d14dab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,76 +14,12 @@ dependencies = [ "zerocopy 0.7.35", ] -[[package]] -name = "aho-corasick" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] - [[package]] name = "allocator-api2" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" -[[package]] -name = "anstream" -version = "0.6.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" - -[[package]] -name = "anstyle-parse" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" -dependencies = [ - "anstyle", - "windows-sys 0.52.0", -] - -[[package]] -name = "anyhow" -version = "1.0.93" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" - [[package]] name = "ark-bls12-381" version = "0.4.0" @@ -108,7 +44,7 @@ dependencies = [ "ark-std", "derivative", "hashbrown 0.13.2", - "itertools 0.10.5", + "itertools", "num-traits", "rayon", "zeroize", @@ 
-126,7 +62,7 @@ dependencies = [ "ark-std", "derivative", "digest", - "itertools 0.10.5", + "itertools", "num-bigint", "num-traits", "paste", @@ -205,18 +141,6 @@ dependencies = [ "rayon", ] -[[package]] -name = "arrayref" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" - -[[package]] -name = "arrayvec" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" - [[package]] name = "autocfg" version = "1.3.0" @@ -224,161 +148,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] -name = "axone-cognitarium" -version = "8.0.0" -dependencies = [ - "axone-rdf", - "base64", - "blake3", - "cosmwasm-schema", - "cosmwasm-std", - "cucumber", - "cw-storage-plus", - "cw-utils", - "cw2", - "derive_builder", - "either", - "futures", - "rio_api", - "rio_turtle", - "rio_xml", - "schemars", - "serde", - "serde_yaml", - "testing", - "thiserror 2.0.17", -] - -[[package]] -name = "axone-cognitarium-client" -version = "8.0.0" -dependencies = [ - "axone-cognitarium", - "cosmwasm-std", - "serde", -] - -[[package]] -name = "axone-dataverse" -version = "8.0.0" -dependencies = [ - "axone-cognitarium", - "axone-cognitarium-client", - "axone-rdf", - "base64", - "bech32", - "cosmwasm-schema", - "cosmwasm-std", - "cw-storage-plus", - "cw-utils", - "cw2", - "itertools 0.14.0", - "multibase", - "rio_api", - "ripemd", - "serde", - "sha2", - "testing", - "thiserror 2.0.17", - "unsigned-varint", -] - -[[package]] -name = "axone-law-stone" -version = "8.0.0" -dependencies = [ - "axone-logic-bindings", - "axone-objectarium", - "axone-objectarium-client", - "axone-wasm", - "cosmwasm-schema", - "cosmwasm-std", - "cw-storage-plus", - "cw-utils", - "cw2", - "itertools 0.14.0", - 
"serde", - "testing", - "thiserror 2.0.17", - "url", -] - -[[package]] -name = "axone-logic-bindings" -version = "8.0.0" -dependencies = [ - "cosmwasm-std", - "schemars", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "axone-objectarium" +name = "axone-dummy" version = "8.0.0" dependencies = [ - "base16ct", - "base64", - "bin-it", - "bs58", + "axone-dummy-lib", "cosmwasm-schema", "cosmwasm-std", - "cw-ownable", - "cw-storage-plus", - "cw-utils", - "cw2", - "derive_builder", - "enum-iterator", - "lzma-rs", - "md-5", - "schemars", - "serde", - "sha2", - "snap", - "testing", - "thiserror 2.0.17", -] - -[[package]] -name = "axone-objectarium-client" -version = "8.0.0" -dependencies = [ - "axone-objectarium", - "axone-wasm", - "cosmwasm-std", - "serde", -] - -[[package]] -name = "axone-rdf" -version = "8.0.0" -dependencies = [ - "base16ct", - "cosmwasm-std", - "itertools 0.14.0", - "rio_api", - "rio_turtle", - "rio_xml", - "sha2", - "thiserror 2.0.17", ] [[package]] -name = "axone-wasm" +name = "axone-dummy-lib" version = "8.0.0" -dependencies = [ - "form_urlencoded", - "serde", - "serde-json-wasm", - "thiserror 2.0.17", - "url", -] - -[[package]] -name = "base-x" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" [[package]] name = "base16ct" @@ -386,16 +166,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" -[[package]] -name = "base256emoji" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" -dependencies = [ - "const-str", - "match-lookup", -] - [[package]] name = "base64" version = "0.22.1" @@ -408,31 +178,6 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d965446196e3b7decd44aa7ee49e31d630118f90ef12f97900f262eb915c951d" -[[package]] -name = "bin-it" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efdc68b7631892303d28c13375b6a8a12508f6bbb9c8743518dad7b840d5610c" - -[[package]] -name = "bitflags" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" - -[[package]] -name = "blake3" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", -] - [[package]] name = "block-buffer" version = "0.10.3" @@ -448,130 +193,24 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e31ea183f6ee62ac8b8a8cf7feddd766317adfb13ff469de57ce033efd6a790" -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "bstr" -version = "1.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "bytecount" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" - [[package]] name = "byteorder" version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" -[[package]] -name = "cc" -version = "1.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"72db2f7947ecee9b03b510377e8bb9077afa27176fdbff55c51027e976fdcc48" -dependencies = [ - "shlex", -] - [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "clap" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", - "terminal_size", -] - -[[package]] -name = "clap_derive" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" -dependencies = [ - "heck 0.5.0", - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "clap_lex" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" - -[[package]] -name = "colorchoice" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" - -[[package]] -name = "console" -version = "0.15.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" -dependencies = [ - "encode_unicode", - "lazy_static", - "libc", - "unicode-width", - "windows-sys 0.52.0", -] - [[package]] name = "const-oid" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" -[[package]] -name = "const-str" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - [[package]] name = "cosmwasm-core" version = "2.2.2" @@ -599,7 +238,7 @@ dependencies = [ "rand_core", "rayon", "sha2", - "thiserror 1.0.69", + "thiserror", ] [[package]] @@ -623,7 +262,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror", ] [[package]] @@ -649,7 +288,7 @@ dependencies = [ "cosmwasm-core", "cosmwasm-crypto", "cosmwasm-derive", - "derive_more 1.0.0-beta.6", + "derive_more", "hex", "rand_core", "rmp-serde", @@ -658,7 +297,7 @@ dependencies = [ "serde-json-wasm", "sha2", "static_assertions", - "thiserror 1.0.69", + "thiserror", ] [[package]] @@ -670,21 +309,6 @@ dependencies = [ "libc", ] -[[package]] -name = "crc" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" - [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -732,65 +356,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "cucumber" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cd12917efc3a8b069a4975ef3cb2f2d835d42d04b3814d90838488f9dd9bf69" -dependencies = [ - "anyhow", - "clap", - "console", - "cucumber-codegen", - "cucumber-expressions", - "derive_more 0.99.17", - 
"drain_filter_polyfill", - "either", - "futures", - "gherkin", - "globwalk", - "humantime", - "inventory", - "itertools 0.13.0", - "lazy-regex", - "linked-hash-map", - "once_cell", - "pin-project", - "regex", - "sealed", - "smart-default", -] - -[[package]] -name = "cucumber-codegen" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e19cd9e8e7cfd79fbf844eb6a7334117973c01f6bad35571262b00891e60f1c" -dependencies = [ - "cucumber-expressions", - "inflections", - "itertools 0.13.0", - "proc-macro2", - "quote", - "regex", - "syn 2.0.87", - "synthez", -] - -[[package]] -name = "cucumber-expressions" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d794fed319eea24246fb5f57632f7ae38d61195817b7eb659455aa5bdd7c1810" -dependencies = [ - "derive_more 0.99.17", - "either", - "nom", - "nom_locate", - "regex", - "regex-syntax 0.7.5", -] - [[package]] name = "curve25519-dalek" version = "4.1.3" @@ -819,34 +384,20 @@ dependencies = [ ] [[package]] -name = "cw-address-like" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73553ee4dad5b1678977ff603e72c3fdd41518ca2b0bd9b245b21e4c72eafa9e" -dependencies = [ - "cosmwasm-std", -] - -[[package]] -name = "cw-ownable" -version = "2.1.0" +name = "der" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2f8ee96ac5342c795a0610410998fc075a95af8c796b6d16479cdffd2471f1" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ - "cosmwasm-schema", - "cosmwasm-std", - "cw-address-like", - "cw-ownable-derive", - "cw-storage-plus", - "cw-utils", - "thiserror 1.0.69", + "const-oid", + "zeroize", ] [[package]] -name = "cw-ownable-derive" -version = "0.6.0" +name = "derivative" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0e69178d27793063dd13812777709cf7d7860ba35a598094d4bd89e1b30c9341" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", @@ -854,194 +405,31 @@ dependencies = [ ] [[package]] -name = "cw-storage-plus" -version = "2.0.0" +name = "derive_more" +version = "1.0.0-beta.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f13360e9007f51998d42b1bc6b7fa0141f74feae61ed5fd1e5b0a89eec7b5de1" +checksum = "f7abbfc297053be59290e3152f8cbcd52c8642e0728b69ee187d991d4c1af08d" dependencies = [ - "cosmwasm-std", - "schemars", - "serde", + "derive_more-impl", ] [[package]] -name = "cw-utils" -version = "2.0.0" +name = "derive_more-impl" +version = "1.0.0-beta.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07dfee7f12f802431a856984a32bce1cb7da1e6c006b5409e3981035ce562dec" +checksum = "2bba3e9872d7c58ce7ef0fcf1844fcc3e23ef2a58377b50df35dd98e42a5726e" dependencies = [ - "cosmwasm-schema", - "cosmwasm-std", - "schemars", - "serde", - "thiserror 1.0.69", + "proc-macro2", + "quote", + "syn 2.0.87", + "unicode-xid", ] [[package]] -name = "cw2" -version = "2.0.0" +name = "digest" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b04852cd38f044c0751259d5f78255d07590d136b8a86d4e09efdd7666bd6d27" -dependencies = [ - "cosmwasm-schema", - "cosmwasm-std", - "cw-storage-plus", - "schemars", - "semver", - "serde", - "thiserror 1.0.69", -] - -[[package]] -name = "darling" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" -dependencies = [ - "fnv", - 
"ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.87", -] - -[[package]] -name = "darling_macro" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" -dependencies = [ - "darling_core", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "data-encoding" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" - -[[package]] -name = "data-encoding-macro" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20c01c06f5f429efdf2bae21eb67c28b3df3cf85b7dd2d8ef09c0838dac5d33e" -dependencies = [ - "data-encoding", - "data-encoding-macro-internal", -] - -[[package]] -name = "data-encoding-macro-internal" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0047d07f2c89b17dd631c80450d69841a6b5d7fb17278cbc43d7e4cfcf2576f3" -dependencies = [ - "data-encoding", - "syn 1.0.107", -] - -[[package]] -name = "der" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.107", -] - -[[package]] -name = "derive_builder" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" -dependencies = [ - "derive_builder_macro", -] - -[[package]] -name = "derive_builder_core" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" -dependencies = [ - "darling", - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "derive_builder_macro" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" -dependencies = [ - "derive_builder_core", - "syn 2.0.87", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.107", -] - -[[package]] -name = "derive_more" -version = "1.0.0-beta.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7abbfc297053be59290e3152f8cbcd52c8642e0728b69ee187d991d4c1af08d" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "1.0.0-beta.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bba3e9872d7c58ce7ef0fcf1844fcc3e23ef2a58377b50df35dd98e42a5726e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", - "unicode-xid", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "const-oid", @@ -1049,23 +437,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "drain_filter_polyfill" -version = "0.1.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "669a445ee724c5c69b1b06fe0b63e70a1c84bc9bb7d9696cd4f4e3ec45050408" - [[package]] name = "dyn-clone" version = "1.0.9" @@ -1133,48 +504,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - -[[package]] -name = "enum-iterator" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c280b9e6b3ae19e152d8e31cf47f18389781e119d4013a2a2bb0180e5facc635" -dependencies = [ - "enum-iterator-derive", -] - -[[package]] -name = "enum-iterator-derive" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ab991c1362ac86c61ab6f556cff143daa22e5a15e4e189df818b2fd19fe65b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "errno" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - [[package]] name = "ff" version = "0.13.0" @@ -1191,110 +520,6 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - [[package]] name = "generic-array" version = "0.14.6" @@ -1317,47 +542,6 @@ dependencies = [ "wasi", ] -[[package]] -name = "gherkin" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20b79820c0df536d1f3a089a2fa958f61cb96ce9e0f3f8f507f5a31179567755" -dependencies = [ - "heck 0.4.1", - "peg", - "quote", - "serde", - "serde_json", - "syn 2.0.87", - "textwrap", - "thiserror 1.0.69", - "typed-builder", -] - -[[package]] -name = "globset" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" -dependencies = [ - "aho-corasick", - "bstr", - "log", - "regex-automata", - "regex-syntax 0.8.3", -] - -[[package]] -name = "globwalk" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" -dependencies = [ - "bitflags", - "ignore", - "walkdir", -] - [[package]] name = "group" version = "0.13.0" @@ -1388,18 +572,6 @@ dependencies = [ "allocator-api2", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - [[package]] name = "hex" version = "0.4.3" @@ -1416,380 +588,44 @@ dependencies = [ ] [[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - -[[package]] -name = "icu_collections" -version = "1.5.0" +name = "itertools" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ - "displaydoc", - "yoke", - "zerofrom", - "zerovec", + "either", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "itoa" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] +checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" [[package]] -name = "icu_locid_transform" -version = "1.5.0" +name = "k256" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", + "cfg-if", + "ecdsa", + "elliptic-curve", + "sha2", ] [[package]] -name = "icu_locid_transform_data" -version = "1.5.0" +name = "libc" +version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] -name = 
"icu_normalizer" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "utf16_iter", - "utf8_iter", - "write16", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" - -[[package]] -name = "icu_properties" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_locid_transform", - "icu_properties_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" - -[[package]] -name = "icu_provider" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", - "writeable", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - 
-[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "ignore" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" -dependencies = [ - "crossbeam-deque", - "globset", - "log", - "memchr", - "regex-automata", - "same-file", - "walkdir", - "winapi-util", -] - -[[package]] -name = "indexmap" -version = "2.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" -dependencies = [ - "equivalent", - "hashbrown 0.14.5", -] - -[[package]] -name = "inflections" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a257582fdcde896fd96463bf2d40eefea0580021c0712a0e2b028b60b47a837a" - -[[package]] -name = "inventory" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f958d3d68f4167080a18141e10381e7634563984a537f2a49a30fd8e53ac5767" - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" 
-version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" - -[[package]] -name = "k256" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "sha2", -] - -[[package]] -name = "lazy-regex" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d12be4595afdf58bd19e4a9f4e24187da2a66700786ff660a418e9059937a4c" -dependencies = [ - "lazy-regex-proc_macros", - "once_cell", - "regex", -] - -[[package]] -name = "lazy-regex-proc_macros" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bcd58e6c97a7fcbaffcdc95728b393b8d98933bfadad49ed4097845b57ef0b" -dependencies = [ - "proc-macro2", - "quote", - "regex", - "syn 2.0.87", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "libc" -version = "0.2.152" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" - -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - -[[package]] -name = "linux-raw-sys" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" - -[[package]] -name = "litemap" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" - -[[package]] -name = "log" -version = "0.4.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" - -[[package]] -name = "lzma-rs" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" -dependencies = [ - "byteorder", - "crc", -] - -[[package]] -name = "match-lookup" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1265724d8cb29dbbc2b0f06fffb8bf1a8c0cf73a78eede9ba73a4a66c52a981e" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.107", -] - -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest", -] - -[[package]] -name = "memchr" -version = "2.7.2" +name = "memchr" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "multibase" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77" -dependencies = [ - "base-x", - "base256emoji", - "data-encoding", - "data-encoding-macro", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "nom_locate" -version = "4.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e3c83c053b0713da60c5b8de47fe8e494fe3ece5267b2f23090a07a053ba8f3" -dependencies = [ - "bytecount", - "memchr", - "nom", -] - [[package]] name = "num-bigint" version = "0.4.6" @@ -1824,18 +660,6 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" -[[package]] -name = "oxilangtag" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d91edf4fbb970279443471345a4e8c491bf05bb283b3e6c88e4e606fd8c181b" - -[[package]] -name = "oxiri" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb175ec8981211357b7b379869c2f8d555881c55ea62311428ec0de46d89bd5c" - [[package]] name = "p256" version = "0.13.2" @@ -1854,71 +678,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "peg" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f76678828272f177ac33b7e2ac2e3e73cc6c1cd1e3e387928aa69562fa51367" -dependencies = [ - "peg-macros", - "peg-runtime", -] - -[[package]] -name = "peg-macros" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "636d60acf97633e48d266d7415a9355d4389cea327a193f87df395d88cd2b14d" -dependencies = [ - 
"peg-runtime", - "proc-macro2", - "quote", -] - -[[package]] -name = "peg-runtime" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555b1514d2d99d78150d3c799d4c357a3e2c2a8062cd108e93a06d9057629c5" - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pin-project" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - [[package]] name = "ppv-lite86" version = "0.2.18" @@ -1941,895 +700,325 @@ dependencies = [ name = "proc-macro2" version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quick-xml" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc" -dependencies = [ - "memchr", -] - -[[package]] -name = "quote" -version = "1.0.35" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "regex" -version = "1.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax 0.8.3", -] - -[[package]] -name = "regex-automata" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.8.3", -] - 
-[[package]] -name = "regex-syntax" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" - -[[package]] -name = "regex-syntax" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - -[[package]] -name = "rio_api" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61d0c76ddf8b00cbb4d2c5932d067d49245c2f1f651809bde3cf265033ddb1af" - -[[package]] -name = "rio_turtle" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6f351b77353c7c896f0cd5ced2a25a7e95b5360cb68d1d7c16682ee096d7f40" -dependencies = [ - "oxilangtag", - "oxiri", - "rio_api", -] - -[[package]] -name = "rio_xml" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd3384ae785ed3b0159607adc08adef580a28e277fbfa375c42d162e9da93b1" -dependencies = [ - "oxilangtag", - "oxiri", - "quick-xml", - "rio_api", -] - -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest", -] - -[[package]] -name = "rmp" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" -dependencies = [ - "byteorder", - "num-traits", - "paste", -] - -[[package]] -name = "rmp-serde" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" -dependencies = [ - "byteorder", - "rmp", - "serde", -] - -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] - -[[package]] -name = "rustix" -version = "0.38.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.52.0", -] - -[[package]] -name = "ryu" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schemars" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" -dependencies = [ - "dyn-clone", - "schemars_derive", - "serde", - "serde_json", -] - -[[package]] -name = "schemars_derive" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" -dependencies = [ - "proc-macro2", - "quote", - "serde_derive_internals", - "syn 2.0.87", -] - -[[package]] -name = "sealed" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a8caec23b7800fb97971a1c6ae365b6239aaeddfb934d6265f8505e795699d" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "sec1" -version = "0.7.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48518a2b5775ba8ca5b46596aae011caa431e6ce7e4a67ead66d92f08884220e" -dependencies = [ - "base16ct", - "der", - "generic-array", - "subtle", - "zeroize", -] - -[[package]] -name = "semver" -version = "1.0.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde-json-wasm" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f05da0d153dd4595bdffd5099dc0e9ce425b205ee648eb93437ff7302af8c9a5" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "serde_derive_internals" -version = "0.29.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "serde_json" -version = "1.0.133" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" -dependencies = [ - "itoa", - "memchr", - "ryu", - "serde", -] - -[[package]] -name = 
"serde_yaml" -version = "0.9.34+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" -dependencies = [ - "indexmap", - "itoa", - "ryu", - "serde", - "unsafe-libyaml", -] - -[[package]] -name = "sha2" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "signature" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" -dependencies = [ - "digest", - "rand_core", -] - -[[package]] -name = "slab" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "smart-default" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "smawk" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" - -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "syn" -version = "1.0.107" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "synstructure" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "synthez" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3d2c2202510a1e186e63e596d9318c91a8cbe85cd1a56a7be0c333e5f59ec8d" -dependencies = [ - "syn 2.0.87", - "synthez-codegen", - "synthez-core", -] - -[[package]] -name = "synthez-codegen" -version = 
"0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f724aa6d44b7162f3158a57bccd871a77b39a4aef737e01bcdff41f4772c7746" -dependencies = [ - "syn 2.0.87", - "synthez-core", -] - -[[package]] -name = "synthez-core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78bfa6ec52465e2425fd43ce5bbbe0f0b623964f7c63feb6b10980e816c654ea" -dependencies = [ - "proc-macro2", - "quote", - "sealed", - "syn 2.0.87", -] - -[[package]] -name = "terminal_size" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" -dependencies = [ - "rustix", - "windows-sys 0.48.0", -] - -[[package]] -name = "testing" -version = "5.0.0" -dependencies = [ - "cosmwasm-std", -] - -[[package]] -name = "textwrap" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" -dependencies = [ - "smawk", - "unicode-linebreak", - "unicode-width", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - -[[package]] -name = "thiserror" -version = "2.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" -dependencies = [ - "thiserror-impl 2.0.17", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.17" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "tinystr" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tinyvec" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "typed-builder" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe83c85a85875e8c4cb9ce4a890f05b23d38cd0d47647db7895d3d2a79566d2" -dependencies = [ - "typed-builder-macro", -] - -[[package]] -name = "typed-builder-macro" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29a3151c41d0b13e3d011f98adc24434560ef06673a155a6c7f66b9879eecce2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - -[[package]] -name = "typenum" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "unicode-ident" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd" - -[[package]] -name = "unicode-linebreak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] [[package]] -name = "unicode-width" -version = "0.1.12" +name = "quote" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] [[package]] -name = "unicode-xid" -version = "0.2.4" +name = "rand" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_chacha", + "rand_core", +] [[package]] -name = "unsafe-libyaml" -version = "0.2.11" +name = "rand_chacha" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] [[package]] -name = "unsigned-varint" -version = "0.8.0" +name = "rand_core" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] [[package]] -name = "url" -version = "2.5.7" +name = "rayon" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ - "form_urlencoded", - "idna", - 
"percent-encoding", - "serde", + "either", + "rayon-core", ] [[package]] -name = "utf16_iter" -version = "1.0.5" +name = "rayon-core" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] [[package]] -name = "utf8_iter" -version = "1.0.4" +name = "rfc6979" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] [[package]] -name = "utf8parse" -version = "0.2.1" +name = "rmp" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] [[package]] -name = "version_check" -version = "0.9.4" +name = "rmp-serde" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] [[package]] -name = "walkdir" -version = "2.5.0" +name = "rustc_version" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "same-file", - "winapi-util", + "semver", ] [[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +name = "ryu" 
+version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] -name = "winapi-util" -version = "0.1.8" +name = "schemars" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" dependencies = [ - "windows-sys 0.52.0", + "dyn-clone", + "schemars_derive", + "serde", + "serde_json", ] [[package]] -name = "windows-sys" -version = "0.48.0" +name = "schemars_derive" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" dependencies = [ - "windows-targets 0.48.5", + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.87", ] [[package]] -name = "windows-sys" -version = "0.52.0" +name = "sec1" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "48518a2b5775ba8ca5b46596aae011caa431e6ce7e4a67ead66d92f08884220e" dependencies = [ - "windows-targets 0.52.5", + "base16ct", + "der", + "generic-array", + "subtle", + "zeroize", ] [[package]] -name = "windows-targets" -version = "0.48.5" +name = "semver" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - 
"windows_x86_64_msvc 0.48.5", -] +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] -name = "windows-targets" -version = "0.52.5" +name = "serde" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "serde_core", + "serde_derive", ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.5" +name = "serde-json-wasm" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "f05da0d153dd4595bdffd5099dc0e9ce425b205ee648eb93437ff7302af8c9a5" +dependencies = [ + "serde", +] [[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] [[package]] -name = "windows_aarch64_msvc" -version = "0.52.5" +name = "serde_derive" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = 
"d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] [[package]] -name = "windows_i686_gnu" -version = "0.48.5" +name = "serde_derive_internals" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] [[package]] -name = "windows_i686_gnu" -version = "0.52.5" +name = "serde_json" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] [[package]] -name = "windows_i686_gnullvm" -version = "0.52.5" +name = "sha2" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] [[package]] -name = "windows_i686_msvc" -version = "0.48.5" +name = "signature" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +dependencies = [ + "digest", + "rand_core", +] [[package]] -name = "windows_i686_msvc" -version = "0.52.5" +name = "static_assertions" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = 
"a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" +name = "subtle" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] -name = "windows_x86_64_gnu" -version = "0.52.5" +name = "syn" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] [[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" +name = "syn" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] [[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.5" +name = "thiserror" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] [[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] [[package]] -name = "windows_x86_64_msvc" -version = "0.52.5" +name = 
"typenum" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] -name = "write16" -version = "1.0.0" +name = "unicode-ident" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" +checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd" [[package]] -name = "writeable" -version = "0.5.5" +name = "unicode-xid" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] -name = "yoke" -version = "0.7.4" +name = "version_check" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" -dependencies = [ - "serde", - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] -name = "yoke-derive" -version = "0.7.4" +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", - "synstructure", -] +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "zerocopy" @@ -2872,27 +1061,6 @@ dependencies = [ "syn 2.0.87", ] -[[package]] -name = "zerofrom" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" -dependencies = [ 
- "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", - "synstructure", -] - [[package]] name = "zeroize" version = "1.7.0" @@ -2912,25 +1080,3 @@ dependencies = [ "quote", "syn 2.0.87", ] - -[[package]] -name = "zerovec" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] diff --git a/Cargo.toml b/Cargo.toml index 0d1cbd47..3d3a1523 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,32 +24,6 @@ panic = 'abort' rpath = false [workspace.dependencies] -axone-cognitarium = { path = "./contracts/axone-cognitarium", version = "8.0.0", features = [ - "library", -] } -axone-cognitarium-client = { path = "./packages/axone-cognitarium-client", version = "8.0.0" } -axone-logic-bindings = { path = "./packages/axone-logic-bindings", version = "8.0.0" } -axone-objectarium = { path = "./contracts/axone-objectarium", version = "8.0.0", features = [ - "library", -] } -axone-objectarium-client = { path = "./packages/axone-objectarium-client", version = "8.0.0" } -axone-rdf = { path = "./packages/axone-rdf", version = "8.0.0" } -axone-wasm = { path = "./packages/axone-wasm", version = "8.0.0" } cosmwasm-schema = "2.2.2" cosmwasm-std = { version = "2.2.2", features = ["cosmwasm_2_2"] } -cosmwasm-storage = "1.5.2" -cw-multi-test = "2.2.0" -cw-storage-plus = "2.0.0" -cw-utils = "2.0.0" -cw2 = "2.0.0" -iref = "3.1.3" 
-langtag = "0.3.4" -rdf-types = "0.18.2" -rio_api = "0.8.5" -rio_turtle = "0.8.5" -rio_xml = "0.8.5" -schemars = "0.8.22" serde = { version = "1.0.228", default-features = false, features = ["derive"] } -serde-json-wasm = "1.0.1" -testing = { path = "packages/testing" } -thiserror = { version = "2.0.17" } diff --git a/README.md b/README.md index 9c3b6bb0..3b8beafb 100644 --- a/README.md +++ b/README.md @@ -12,163 +12,20 @@ [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg?style=for-the-badge)](https://github.com/semantic-release/semantic-release) [![license](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg?style=for-the-badge)](https://opensource.org/licenses/BSD-3-Clause) +## 🚧⚠️ **Repo under reconstruction** ⚠️🚧 + +### Hey there! We're rebuilding the smart contract stack from the ground up + +*Old code has been archived, and a new foundation is on its way.* + +Looking for the previous implementation? +👉 Check the [**last release**](https://github.com/axone-protocol/contracts/releases/v8.0.0). + ## ✨ Smart Contracts This repository hosts Smart Contracts that are deployed on the [AXONE network](http://axone.xyz). But they are compatible with any [Cosmos blockchains](https://cosmos.network/) that uses the [CosmWasm](https://cosmwasm.com/) framework. -### 🗄️ Storage oriented Smart Contracts - -> The class of Smart Contracts which act as reliable repositories, offering secure, efficient, and auditable storage -> and retrieval of various data types on the blockchain. - - - - - - - - - - - - - - -
- objectarium -
→ Tech documentation -
- - status: mature - - - version - - - crates - -
-

A Smart Contract which enables the storage of arbitrary unstructured Objects in any Cosmos blockchains.

It provides a flexible solution for storing data without imposing strict schemas or predefined structures, accommodating various data formats and types.

-
- type: storage - - kind: object - - state: immutable -
- - - - - - - - - - - - - - -
- cognitarium -
→ Tech documentation -
- - status: intermediate - - - version - - - crates - -

A Smart Contract which facilitates the storage and querying of semantic data using RDF (Resource Description Framework), which represents information as semantic triples.

In essence, it serves as a repository for knowledge, providing a dedicated space for storing and retrieving semantic information within any Cosmos blockchains.

- type: storage - - kind: semantic - - state: mutable -
- -### ⚖️ Sovereignty oriented Smart Contracts - -> The class of Smart Contracts designed to prioritize the sovereignty of the involved parties by allowing them to define and enforce their own rules and regulations. - - - - - - - - - - - - - - -
- law-stone -
→ Tech documentation -
- - status: mature - - - version - - - crates - -
-

A multifaceted Smart Contract designed to express and evaluate legal content, including but not limited to regulations, agreements, consents and permissions.

It offers extraordinary adaptability and can be utilized in any blockchain within the Cosmos ecosystem that seeks to establish robust and flexible on-chain governance systems.

-
- type: sovereignty - - kind: logic - - state: immutable -
- -### 🌐 Resource Management oriented Smart Contracts - -> The class of Smart Contracts designed to manage and orchestrate the lifecycle of various resources in the Dataverse. - - - - - - - - - - - - - - -
- dataverse -
→ Tech documentation -
- - status: genesis - - - version - - - crates - -
-

A Smart Contract responsible for overseeing and managing the Dataverse within the AXONE network.

The Dataverse is an ever-expanding universe that encompasses a wide range of Digital Resources, including datasets, data processing algorithms, ML algorithm, storage resources, computational resources, identity management solutions, orchestration engines, oracles, and many other resources recorded on the blockchain.

-
- type: resource management - state: mutable -
- ## 🥚 Maturity The maturity of each contract is indicated by the following emojis. diff --git a/contracts/axone-cognitarium/Cargo.toml b/contracts/axone-cognitarium/Cargo.toml deleted file mode 100644 index 73e75c77..00000000 --- a/contracts/axone-cognitarium/Cargo.toml +++ /dev/null @@ -1,55 +0,0 @@ -[package] -authors = { workspace = true } -description = "A CosmWasm Smart Contract which enables the storage and querying of Semantic data using RDF, which represents information as semantic triples." -edition = { workspace = true } -homepage = { workspace = true } -keywords = { workspace = true } -license = { workspace = true } -name = "axone-cognitarium" -repository = { workspace = true } -rust-version = { workspace = true } -version = { workspace = true } - -exclude = [ - # Those files are rust-optimizer artifacts. You might want to commit them for convenience but they should not be part of the source code publication. - "contract.wasm", - "hash.txt", -] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[lib] -crate-type = ["cdylib", "rlib"] - -[dependencies] -axone-rdf.workspace = true -blake3 = "1.8.2" -cosmwasm-schema.workspace = true -cosmwasm-std.workspace = true -cw-storage-plus.workspace = true -cw-utils.workspace = true -cw2.workspace = true -derive_builder = "0.20.2" -either = "1.15.0" -rio_api.workspace = true -rio_turtle.workspace = true -rio_xml.workspace = true -schemars.workspace = true -serde.workspace = true -thiserror.workspace = true - -[dev-dependencies] -base64 = "0.22.1" -cucumber = "0.21.1" -futures = "0.3.31" -serde_yaml = "0.9.34" -testing.workspace = true - -[features] -# use library feature to disable all instantiate/execute/query exports -library = [] - -[[test]] -harness = false -name = "cucumber" -path = "tests/e2e/main.rs" diff --git a/contracts/axone-cognitarium/README.md b/contracts/axone-cognitarium/README.md deleted file mode 100644 index 47e9c008..00000000 --- 
a/contracts/axone-cognitarium/README.md +++ /dev/null @@ -1,377 +0,0 @@ -# Cognitarium - -A [CosmWasm](https://cosmwasm.com/) Smart Contract which enables the storage and querying of Semantic data using [RDF (Resource Description Framework)](https://en.wikipedia.org/wiki/Resource_Description_Framework), which represents information as semantic triples. - -## Purpose - -The Smart Contract operates as a [semantic database](https://en.wikipedia.org/wiki/Triplestore), adept at storing and fetching [RDF triples](https://en.wikipedia.org/wiki/Semantic_triple) via semantic queries. It can be deployed on any blockchain within the [Cosmos blockchains](https://cosmos.network/) network, utilizing the [CosmWasm](https://cosmwasm.com/) framework. - -The key features of the contract include: - -**Insertion of RDF Triples:** -This functionality enables the insertion of new data in the form of [RDF triples](https://en.wikipedia.org/wiki/Semantic_triple) onto the blockchain, ensuring secure and tamper-proof storage. The Smart Contract supports inserting these triples in various serialization formats including [RDF/XML](https://en.wikipedia.org/wiki/RDF/XML), [Turtle](https://www.w3.org/TR/turtle/), [N-Triples](https://www.w3.org/TR/n-triples/) and [N-Quads](https://www.w3.org/TR/n-quads/). - -**Removal of RDF Triples:** -This functionality enables the selective deletion of RDF triples from the on-chain store. Users can specify patterns or criteria that identify the triples to be removed, ensuring precise and targeted removal of data. - -**Querying of RDF Triples:** -The Smart Contract provides powerful on-chain querying capabilities, allowing users to retrieve specific RDF triples stored on the blockchain. This is done using a variation of [SPARQL](https://www.w3.org/TR/sparql11-query/), a specialized query language designed for retrieving and manipulating data stored in RDF format. 
Users can specify their search criteria in the query, and the Smart Contract will return the matching RDF triples, directly accessing the on-chain data. This feature supports various serialization formats for the output, such as Turtle, N-Triples, N-Quads, and RDF/XML, offering flexibility in how the retrieved data is presented and used. - -**Policies of the Store:** -The Smart Contract includes a straightforward yet effective policies functionality to manage the capacity and usage of the on-chain storage effectively. These policies ensure efficient operation and prevent misuse or overuse of the Smart Contract. For instance: - -- Maximum Triples: Caps the total number of RDF triples the store can hold, preventing database overload. -- Storage Size Limit: Sets an upper bound on the store's data size in bytes, managing blockchain resource use. -- Query Size Limit: Restricts the size or complexity of queries to maintain fast and reliable data retrieval. -- Insert Data Limit: Limits the size of data (in bytes) that can be added in a single transaction, ensuring smooth and efficient data insertion. - -## Rationale - -The data preserved in the blockchain holds significant value due to its derivation from a distributed consensus, rendering it a reliable source for decision-making, applicable to both on-chain and off-chain scenarios. - -To effectively utilize this data, it's essential to adopt representation models that cater to diverse requirements. The Smart Contract Cognitarium provides such a model, facilitating the depiction of intricate and evolving semantic connections within a highly interconnected dataset. This approach transforms the data into a Knowledge Graph, enabling an accurate portrayal of existing facts and fostering the generation of new insights. - -## Play - -### Model your data with RDF - -[RDF](https://www.w3.org/RDF/) encodes information in triple structures. 
The basic structure of an RDF triple is `subject-predicate-object`, much like a simple sentence in the English language. - -1. **Subject**: The subject is the entity or resource the statement is about. It's typically a URI ([Uniform Resource Identifier](https://en.wikipedia.org/wiki/Uniform_Resource_Identifier)) which uniquely identifies a resource. -2. **Predicate**: The predicate (also called a property) is a specific aspect, characteristic, attribute, or relation that describes the subject. It's also typically a URI. -3. **Object**: The object is the value of the attribute defined by the predicate for the subject. It can be a URI or a literal (such as a string or a number) and may also include additional information such as a language tag or a datatype. - -In RDF, **prefixes** are used as a shorthand notation for long URIs to make the data more readable and less verbose. They're similar to namespaces in programming languages. For instance, instead of writing `http://www.w3.org/2001/XMLSchema#integer`, you could declare a prefix `xsd` to represent the `http://www.w3.org/2001/XMLSchema#` URI and then use `xsd:integer`. - -[Turtle (Terse RDF Triple Language)](https://www.w3.org/TR/turtle/) is a syntax that allows RDF to be completely written in a compact and natural text form, with abbreviations for common usage patterns and datatypes. - -Here's an RDF triple written in Turtle format (`.ttl` file): - -```turtle -@prefix ex: . -@prefix xsd: . - -ex:Alice ex:hasAge "30"^^xsd:integer . -``` - -In this example: - -- **`ex:Alice`** is the subject (using `ex` as a prefix for the `http://example.com/stuff/1.0/` URI). -- **`ex:hasAge`** is the predicate. -- **`"30"^^xsd:integer`** is the object, a literal of datatype integer (using **`xsd`** as a prefix for the XML Schema Datatype namespace). - -In the Turtle syntax, the semicolon (**`;`**) is used as a shorthand to reduce verbosity when multiple predicates and objects have the same subject. 
It allows you to write multiple predicates and objects for the same subject without having to repeat the subject. -The comma (**`,`**) is used as a shorthand for reducing verbosity when the same subject and predicate have multiple objects. - -Suppose we want to express that Alice is 30 years old person, and her email is `alice@example.com`: - -```turtle -@prefix ex: . -@prefix xsd: . - -ex:Alice a ; - ex:hasAge "30"^^xsd:integer ; - ex:hasEmail "alice@example.com" . -``` - -:::tip -The lowercase "a" is a special abbreviation for the RDF type property, which states that a resource is an instance of a particular class. This is essentially equivalent to **``**, and it's used to indicate the type of a resource. -::: - -The same RDF triple can be expressed in RDF/XML format (`.rdf.xml` file): - -```xml - - - - 30 - alice@example.com - - -``` - -### Instantiate the Smart Contract - -Let's initiate a new instance of Smart Contract and input some RDF triples into it. The `axone-cognitarium` can be set up in the following manner. Please consult the schema for additional details regarding configuration settings. - -```bash -axoned tx wasm instantiate $CODE_ID \ - --from $ADDR \ - --label "my-rdf-storage" \ - --admin $ADMIN_ADDR \ - --gas 1000000 \ - '{}' -``` - -:::tip -You can provide some limitation parameters to restrict usage for both execute and query messages. For instance, you can set a maximum number of triples that can be stored in the smart contract, or a maximum size of data that can be inserted in a single transaction. 
- -The default values are: - -```json -{ - "limits": { - "max_byte_size": "340282366920938463463374607431768211455", - "max_insert_data_byte_size": "340282366920938463463374607431768211455", - "max_insert_data_triple_count": "340282366920938463463374607431768211455", - "max_query_limit": 30, - "max_query_variable_count": 30, - "max_triple_byte_size": "340282366920938463463374607431768211455", - "max_triple_count": "340282366920938463463374607431768211455" - } -} -``` - -::: - -### Insert RDF triples - -To insert RDF triples, you need to send an `InsertData` message through the `cognitarium` smart contract you've already instantiated. For this operation, your inputs should include the data of the triples, encoded in [base64](https://en.wikipedia.org/wiki/Base64), as well as the format. The format options available are: - -- `turtle` (default) -- `rdf_xml` -- `n_triples` -- `n_quads` - -Let's consider the following example of data in Turtle format, contained within a file named `data.ttl`. It describes a small network of people and their relationships, such as name, title, and whom they know. - -```turtle -@prefix : . -@prefix foaf: . -@prefix schema: . - -:alice a foaf:Person ; - foaf:name "Alice" ; - foaf:knows :bob ; - schema:email "alice@example.org" . - -:bob a foaf:Person ; - foaf:name "Bob" ; - foaf:knows :alice, :carol ; - schema:jobTitle "Software Developer" . - -:carol a foaf:Person ; - foaf:name "Carol" ; - schema:jobTitle "Data Scientist" ; - foaf:knows :bob . 
-``` - -You can insert this data into the `cognitarium` smart contract with the following command: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from axone1cu9wzlcyyxpek20jaqfwzu3llzjgx34cqf94yj \ - --gas 10000000 \ - "{\"insert_data\":{\"format\": \"turtle\", \"data\": \"$(cat data.ttl | base64 | tr -d '\n\r')\"}}" -``` - -With the transaction hash we can query the number of triples inserted: - -```bash -axoned query tx $TX_HASH -ojson | - jq -r '.events[] | select(.type == "wasm") | .attributes[] | select(.key == "triple_count") | .value' -``` - -### Query RDF triples - -Now that we've populated the axone-cognitarium with several triples, let's explore how to retrieve this data. We can utilize the Select query message for this purpose. If you're familiar with [SPARQL](https://www.w3.org/TR/rdf-sparql-query/), you'll find the process quite intuitive. - -A `select` query on a `cognitarium` instance enables you to fetch and filter the data. The `select.query` JSON should contain the following: - -- `prefixes` array: to declare a `prefix` and its related `namespace` -- `limit`: the number of elements to return -- `where`: filters and variable declarations -- `select` array: all `variable` names you declared in `where` you want to get - -`where` should be an array of elements specifying triple filterings. You have to specify `subject`, `predicate` and `object` as a `variable`, or, alternatively, a `prefixed` or `full` `named_node`. - -`object` can also be a `simple` `literal`. 
- -The following query will select all the triples `subject`, `predicate` and `object` from the store: - -```json -{ - "select": { - "query": { - "prefixes": [], - "select": [ - { - "variable": "subject" - }, - { - "variable": "predicate" - }, - { - "variable": "object" - } - ], - "where": [ - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "subject" - }, - "predicate": { - "variable": "predicate" - }, - "object": { - "variable": "object" - } - } - } - } - ], - "limit": null - } - } -} -``` - -It's semantically equivalent to the following SPARQL query: - -```sparql -SELECT ?subject ?predicate ?object -WHERE { - ?subject ?predicate ?object -} -``` - -This query can be executed on the cognitarium smart contract using the command below: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - '{"select":{"query":{"prefixes":[],"select":[{"variable":"subject"},{"variable":"predicate"},{"variable":"object"}],"where":[{"simple":{"triple_pattern":{"subject":{"variable":"subject"},"predicate":{"variable":"predicate"},"object":{"variable":"object"}}}}],"limit":null}}}' -``` - -Now, let's try something more interesting. Let's retrieve the names of people and their job titles, but only for those who know at least one other person in the network. This query introduces filtering based on relationships. 
- -Here's the query: - -```json -{ - "select": { - "query": { - "prefixes": [ - { "foaf": "http://xmlns.com/foaf/0.1/" }, - { "schema": "http://schema.org/" } - ], - "select": [ - { - "variable": "personName" - }, - { - "variable": "jobTitle" - } - ], - "where": [ - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "full": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" - } - } - }, - "object": { - "node": { - "named_node": { - "prefixed": "foaf:Person" - } - } - } - } - } - }, - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "prefixed": "foaf:Name" - } - } - }, - "object": { - "variable": "personName" - } - } - } - }, - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "prefixed": "schema:jobTitle" - } - } - }, - "object": { - "variable": "jobTitle" - } - } - } - }, - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "prefixed": "foaf:knows" - } - } - }, - "object": { - "variable": "knownPerson" - } - } - } - } - ], - "limit": null - } - } -} -``` - -It's semantically equivalent to the following SPARQL query: - -```sparql -PREFIX foaf: -PREFIX schema: - -SELECT ?personName ?jobTitle -WHERE { - ?person a foaf:Person . - ?person foaf:name ?personName . - ?person schema:jobTitle ?jobTitle . - ?person foaf:knows ?knownPerson . 
-} -``` - -This query can be executed on the cognitarium smart contract using the command below: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - '{"select":{"query":{"prefixes":[{"foaf":"http://xmlns.com/foaf/0.1/"},{"schema":"http://schema.org/"}],"select":[{"variable":"personName"},{"variable":"jobTitle"}],"where":[{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"full":"http://www.w3.org/1999/02/22-rdf-syntax-ns#type"}}},"object":{"node":{"named_node":{"prefixed":"foaf:Person"}}}}}},{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"prefixed":"foaf:Name"}}},"object":{"variable":"personName"}}}},{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"prefixed":"schema:jobTitle"}}},"object":{"variable":"jobTitle"}}}},{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"prefixed":"foaf:knows"}}},"object":{"variable":"knownPerson"}}}}],"limit":null}}}' -``` diff --git a/contracts/axone-cognitarium/examples/prolog-query/README.md b/contracts/axone-cognitarium/examples/prolog-query/README.md deleted file mode 100644 index 9972c1ef..00000000 --- a/contracts/axone-cognitarium/examples/prolog-query/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# Prolog query - -In this example we'll see how to query the `axone-cognitarium` from Prolog programs executed by the AXONE logic module. - -We'll use for that purpose the [query.pl](query.pl) sample program, multiple predicates are defined in it, we'll explore them step by step. - -The query we'll make will be performed against a `axone-cognitarium` instance filled with the provided [sample data](../sample-data.rdf.xml), see the [basic example](../basic) to insert them. 
- -## Forge the CosmWasm query - -As seen in a [axone-law-stone example](../../../axone-law-stone/examples/multiple-sources), interaction with smart contracts from Prolog is based on the -interpreter virtual filesystem that'll handle dedicated cosmwasm URIs. - -It's worth to mention that to query cosmwasm smart contracts and getting the raw response we'll need to set in the related URI the param `base64Decode` to `false`. - -The `cosmwasm_query` predicate will help to create the cosmwasm URI, for example: - -```bash -axoned query logic ask \ - --program-file query.pl \ - "cosmwasm_query(cognitarium, '${CONTRACT_ADDR}', json([key-value]), false, URI)." -``` - -## Call the smart contract - -By calling the `cosmwasm_call` predicate with a cosmwasm URI we'll be able to get the JSON response, let's try it with a simple `axone-cognitarium` `Store` query which returns usage information about the triple store: - -```bash -axoned query logic ask \ - --program-file query.pl \ - "cosmwasm_query(cognitarium, '${CONTRACT_ADDR}', 'store', false, URI), cosmwasm_call(URI, Response)." -``` - -## Select query - -Through the `cognitarium_dataset_tags`, we can query the tags present in metadata describing a specific dataset, for example: - -```bash -axoned query logic ask \ - --program-file query.pl \ - "cognitarium_dataset_tags('${CONTRACT_ADDR}', 'https://ontology.axone.space/dataverse/dataset/0ea1fc7a-dd97-4adc-a10e-169c6597bcde', Tags)." -``` - -## Exploiting the response - -Using the `cognitarium_dataset_has_tag` predicate we show how to define rules based on the contract response, here on the present of a certain tag: - -```bash -axoned query logic ask \ - --program-file query.pl \ - "cognitarium_dataset_has_tag('${CONTRACT_ADDR}', 'https://ontology.axone.space/dataverse/dataset/0ea1fc7a-dd97-4adc-a10e-169c6597bcde', 'AwesomeData')." 
-``` diff --git a/contracts/axone-cognitarium/examples/prolog-query/query.pl b/contracts/axone-cognitarium/examples/prolog-query/query.pl deleted file mode 100644 index e41d3321..00000000 --- a/contracts/axone-cognitarium/examples/prolog-query/query.pl +++ /dev/null @@ -1,110 +0,0 @@ -fold_left(Goal, [H|T], Result) :- - fold_left(Goal, T, H, Result). - -fold_left(Goal, [H|T], Acc, Result) :- - call(Goal, Acc, H, NewAcc), - fold_left(Goal, T, NewAcc, Result). - -fold_left(_Goal, [], Result, Result). - -% Concatenate all the elements in the list using the atom_concat predicate. -atoms_concat(List, Result) :- - fold_left(atom_concat, List, Result). - -% Forge a cosmwasm URI that can be handled by the logic module. Take care of the query json serialization & url encoding. -cosmwasm_query(ContractName, ContractAddr, Query, DecodeBase64, URI) :- - json_prolog(RawQuery, Query), - uri_encoded(query, RawQuery, EncodedQuery), - atoms_concat([ - 'cosmwasm:', - ContractName, - ':', - ContractAddr, - '?query=', - EncodedQuery, - '&base64Decode=', - DecodeBase64 - ], URI). - -% Execute the cosmwasm query by opening the URI stream relying on the logic module interpreter virtual filesystem. -% It then consumes the stream unifying it with its string reprensation, close the stream and make the json conversion of -% the contract response. -cosmwasm_call(URI, Response) :- - open(URI, 'read', Stream), - read_string(Stream, _, Raw), - close(Stream), - json_prolog(Raw, Response). - -% Represents the cognitarium Select query input. -cognitarium_select(Prefixes, Select, Where, Limit, Query) :- - Query = json([ - select-json([ - query-json([ - prefixes-Prefixes, - select-Select, - where-Where, - limit-Limit - ]) - ]) - ]). - -% Extract the bindings field of a cognitarium Select query response. -cognitarium_select_bindings(SelectResponse, Bindings) :- - SelectResponse = json([head-_,results-json([bindings-Bindings])]). 
- -% Extract the tag variable value from a single Select response binding. -cognitarium_extract_binding_tag(Binding, Tag) :- - Binding = json([tag-json([datatype- @(null),type-literal,value-Tag,'xml:lang'- @(null)])]). - -% Given a cognitarium address and a dataset identifier, resolves the tags contained in any metadata of the type GeneralMetadata. -cognitarium_dataset_tags(CognitariumAddr, DatasetDID, Tags) :- - cognitarium_select( - [ - json([prefix-'rdf', namespace-'http://www.w3.org/1999/02/22-rdf-syntax-ns#']), - json([prefix-'core', namespace-'https://ontology.axone.space/core/']), - json([prefix-'meta', namespace-'https://ontology.axone.space/metadata/dataset/']) - ], - [ - json([variable-'tag']) - ], - [ - json([ - simple-json([ - triple_pattern-json([ - subject-json([variable-'meta']), - predicate-json([node-json([named_node-json([prefixed-'core:describes'])])]), - object-json([node-json([named_node-json([full-DatasetDID])])]) - ]) - ]) - ]), - json([ - simple-json([ - triple_pattern-json([ - subject-json([variable-'meta']), - predicate-json([node-json([named_node-json([prefixed-'rdf:type'])])]), - object-json([node-json([named_node-json([prefixed-'meta:GeneralMetadata'])])]) - ]) - ]) - ]), - json([ - simple-json([ - triple_pattern-json([ - subject-json([variable-'meta']), - predicate-json([node-json([named_node-json([prefixed-'core:hasTag'])])]), - object-json([variable-'tag']) - ]) - ]) - ]) - ], - @(null), - Query - ), - cosmwasm_query(cognitarium, CognitariumAddr, Query, false, URI), - cosmwasm_call(URI, Response), - cognitarium_select_bindings(Response, Bindings), - maplist(cognitarium_extract_binding_tag, Bindings, Tags). - -% True if a given dataset identifier has the given tag through a GeneralMetadata in the provided cognitarium address. -cognitarium_dataset_has_tag(CognitariumAddr, DatasetDID, Tag) :- - cognitarium_dataset_tags(CognitariumAddr, DatasetDID, Tags), - member(Tag, Tags). 
diff --git a/contracts/axone-cognitarium/examples/sample-data.rdf.xml b/contracts/axone-cognitarium/examples/sample-data.rdf.xml deleted file mode 100644 index 5d3b1565..00000000 --- a/contracts/axone-cognitarium/examples/sample-data.rdf.xml +++ /dev/null @@ -1,89 +0,0 @@ - - - - - - - - - - - A test Data Space. - Un Data Space de test. - AXONE - - AXONE - Test - - Data Space de test - Test Data Space - - - - - - - - - - - - AXONE - - - - Tutorial - AwesomeData - Dataset de test - test Dataset - Me - - A test Dataset. - Un Dataset de test. - - - - - - - 2023-03-28T00:00:00+00:00 - - 2023-03-28T00:00:00+00:00 - - - - - - - - - - - - - AXONE - - - - Tutorial - NotAwesomeData - Dataset de tuto pas bo - ugly tutorial Dataset - Me - - An ugly tutorial Dataset. - Un Dataset de tuto pas bo. - - - - - - - 2023-03-28T00:00:00+00:00 - - 2023-03-28T00:00:00+00:00 - - - diff --git a/contracts/axone-cognitarium/src/bin/schema.rs b/contracts/axone-cognitarium/src/bin/schema.rs deleted file mode 100644 index cf832288..00000000 --- a/contracts/axone-cognitarium/src/bin/schema.rs +++ /dev/null @@ -1,11 +0,0 @@ -use cosmwasm_schema::write_api; - -use axone_cognitarium::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; - -fn main() { - write_api! 
{ - instantiate: InstantiateMsg, - execute: ExecuteMsg, - query: QueryMsg, - } -} diff --git a/contracts/axone-cognitarium/src/contract.rs b/contracts/axone-cognitarium/src/contract.rs deleted file mode 100644 index 57ba3323..00000000 --- a/contracts/axone-cognitarium/src/contract.rs +++ /dev/null @@ -1,2412 +0,0 @@ -#[cfg(not(feature = "library"))] -use cosmwasm_std::entry_point; -use cosmwasm_std::{ - to_json_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdError, StdResult, -}; -use cw2::set_contract_version; -use cw_utils::nonpayable; - -use crate::error::ContractError; -use crate::msg::{DataFormat, ExecuteMsg, InstantiateMsg, QueryMsg}; -use crate::state::{Store, BLANK_NODE_IDENTIFIER_COUNTER, NAMESPACE_KEY_INCREMENT, STORE}; - -// version info for migration info -const CONTRACT_NAME: &str = concat!("crates.io:", env!("CARGO_PKG_NAME")); -const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION"); - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn instantiate( - deps: DepsMut<'_>, - _env: Env, - info: MessageInfo, - msg: InstantiateMsg, -) -> Result { - nonpayable(&info)?; - set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?; - - STORE.save(deps.storage, &Store::new(info.sender, msg.limits.into()))?; - NAMESPACE_KEY_INCREMENT.save(deps.storage, &0u128)?; - BLANK_NODE_IDENTIFIER_COUNTER.save(deps.storage, &0u128)?; - - Ok(Response::default()) -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn execute( - deps: DepsMut<'_>, - _env: Env, - info: MessageInfo, - msg: ExecuteMsg, -) -> Result { - nonpayable(&info)?; - match msg { - ExecuteMsg::InsertData { format, data } => { - execute::insert(deps, info, format.unwrap_or_default(), data) - } - ExecuteMsg::DeleteData { - prefixes, - delete, - r#where, - } => execute::delete(deps, info, prefixes, delete, r#where), - } -} - -pub mod execute { - use super::*; - use crate::msg::DataFormat; - use crate::parser::{Prefix, TripleDeleteTemplate, WhereClause}; - use 
crate::querier::{PlanBuilder, QueryEngine, QueryPlan, ResolvedVariables}; - use crate::rdf::PrefixMap; - use crate::state::{HasCachedNamespaces, Triple}; - use crate::storer::StoreEngine; - use axone_rdf::serde::TripleReader; - use either::{Left, Right}; - use std::io::BufReader; - - pub fn verify_owner(deps: &DepsMut<'_>, info: &MessageInfo) -> Result<(), ContractError> { - if STORE.load(deps.storage)?.owner != info.sender { - Err(ContractError::Unauthorized) - } else { - Ok(()) - } - } - - pub fn insert( - deps: DepsMut<'_>, - info: MessageInfo, - format: DataFormat, - data: Binary, - ) -> Result { - verify_owner(&deps, &info)?; - - let buf = BufReader::new(data.as_slice()); - let mut reader = TripleReader::new(&(&format).into(), buf); - let mut storer = StoreEngine::new(deps.storage)?; - let count = storer.store_all(&mut reader)?; - - Ok(Response::new() - .add_attribute("action", "insert") - .add_attribute("triple_count", count)) - } - - pub fn delete( - deps: DepsMut<'_>, - info: MessageInfo, - prefixes: Vec, - delete: Vec, - r#where: Option, - ) -> Result { - verify_owner(&deps, &info)?; - - let delete = if delete.is_empty() { - Left(match r#where { - Some(WhereClause::Bgp { ref patterns }) => patterns - .iter() - .map(|p| (p.subject.clone(), p.predicate.clone(), p.object.clone())) - .collect(), - _ => Err(StdError::generic_err("Missing triple templates to delete"))?, - }) - } else { - Right( - delete - .into_iter() - .map(|t| (t.subject, t.predicate, t.object)) - .collect(), - ) - }; - - let prefix_map = ::from(prefixes).into_inner(); - let mut plan_builder = PlanBuilder::new(deps.storage, &prefix_map, None); - let plan = match r#where { - Some(ref w) => plan_builder.build_plan(w)?, - None => QueryPlan::empty_plan(), - }; - - let query_engine = QueryEngine::new(deps.storage, plan_builder.cached_namespaces()); - let delete_templates = query_engine.make_triple_templates(&plan, &prefix_map, delete)?; - - let triples = if r#where.is_none() { - let empty_vars = 
ResolvedVariables::with_capacity(0); - delete_templates - .into_iter() - .filter_map(|tpl| match tpl.resolve(&empty_vars) { - Ok(Some(v)) => Some(Ok(v)), - Ok(None) => None, - Err(e) => Some(Err(e)), - }) - .collect::>>()? - } else { - query_engine - .construct_triples(plan, delete_templates) - .collect::>>()? - }; - - let mut store = StoreEngine::new(deps.storage)?; - let count = store.delete_all(&triples)?; - - Ok(Response::new() - .add_attribute("action", "delete") - .add_attribute("triple_count", count)) - } -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn query(deps: Deps<'_>, _env: Env, msg: QueryMsg) -> StdResult { - match msg { - QueryMsg::Store {} => to_json_binary(&query::store(deps)?), - QueryMsg::Select { query } => to_json_binary(&query::select(deps, query)?), - QueryMsg::Describe { query, format } => { - to_json_binary(&query::describe(deps, query, format.unwrap_or_default())?) - } - QueryMsg::Construct { query, format } => to_json_binary(&query::construct( - deps, - query, - format.unwrap_or(DataFormat::default()), - )?), - } -} - -pub mod query { - use super::*; - use crate::msg::{ConstructResponse, DescribeResponse, SelectResponse, StoreResponse}; - use crate::parser::{ConstructQuery, DescribeQuery, SelectQuery, WhereClause}; - use crate::parser::{ - Node, TripleConstructTemplate, TriplePattern, VarOrNamedNode, VarOrNode, VarOrNodeOrLiteral, - }; - use crate::querier::{PlanBuilder, QueryEngine}; - use crate::rdf::PrefixMap; - use crate::state::HasCachedNamespaces; - use axone_rdf::normalize::IdentifierIssuer; - - pub fn store(deps: Deps<'_>) -> StdResult { - STORE.load(deps.storage).map(Into::into) - } - - pub fn select(deps: Deps<'_>, query: SelectQuery) -> StdResult { - let store = STORE.load(deps.storage)?; - - if query.select.len() > store.limits.max_query_variable_count as usize { - Err(StdError::generic_err( - "Maximum query variable count exceeded", - ))?; - } - - let count = 
query.limit.unwrap_or(store.limits.max_query_limit); - if count > store.limits.max_query_limit { - Err(StdError::generic_err("Maximum query limit exceeded"))?; - } - - let prefix_map = PrefixMap::from(query.prefixes).into_inner(); - let mut plan_builder = - PlanBuilder::new(deps.storage, &prefix_map, None).with_limit(count as usize); - let plan = plan_builder.build_plan(&query.r#where)?; - - QueryEngine::new(deps.storage, plan_builder.cached_namespaces()) - .select(plan, query.select) - .and_then(|res| util::map_select_solutions(deps, res, plan_builder.cached_namespaces())) - } - - pub fn describe( - deps: Deps<'_>, - query: DescribeQuery, - format: DataFormat, - ) -> StdResult { - let (p, o) = ("_2p".to_owned(), "_3o".to_owned()); - - let (construct, r#where) = match &query.resource { - VarOrNamedNode::Variable(var) => { - let select = TriplePattern { - subject: VarOrNode::Variable(var.clone()), - predicate: VarOrNamedNode::Variable(format!("{var}{p}")), - object: VarOrNodeOrLiteral::Variable(format!("{var}{o}")), - }; - - let r#where = match query.r#where { - Some(c) => WhereClause::LateralJoin { - left: Box::new(c), - right: Box::new(WhereClause::Bgp { - patterns: vec![select.clone()], - }), - }, - None => WhereClause::Bgp { - patterns: vec![select.clone()], - }, - }; - - (vec![select], r#where) - } - VarOrNamedNode::NamedNode(iri) => { - let select = TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(iri.clone())), - predicate: VarOrNamedNode::Variable(p), - object: VarOrNodeOrLiteral::Variable(o), - }; - - ( - vec![select.clone()], - WhereClause::Bgp { - patterns: vec![select], - }, - ) - } - }; - - let out = util::construct_atoms( - deps.storage, - &format, - query.prefixes, - construct - .into_iter() - .map(|t| (t.subject, t.predicate, t.object)) - .collect(), - r#where, - )?; - - Ok(DescribeResponse { - format, - data: Binary::from(out), - }) - } - - pub fn construct( - deps: Deps<'_>, - query: ConstructQuery, - format: DataFormat, - ) -> StdResult 
{ - let ConstructQuery { - construct, - prefixes, - r#where, - } = query; - - let construct = if construct.is_empty() { - match &r#where { - WhereClause::Bgp { patterns } => patterns - .iter() - .map(|p| TripleConstructTemplate { - subject: p.subject.clone(), - predicate: p.predicate.clone(), - object: p.object.clone(), - }) - .collect(), - _ => Err(StdError::generic_err("missing triples to construct"))?, - } - } else { - construct - }; - - let mut id_issuer = IdentifierIssuer::new("a", 0u128); - let construct: Vec<_> = construct - .into_iter() - .map(|t| TripleConstructTemplate { - subject: match t.subject { - VarOrNode::Node(Node::BlankNode(n)) => { - VarOrNode::Node(Node::BlankNode(id_issuer.get_str_or_issue(n).to_string())) - } - _ => t.subject, - }, - predicate: t.predicate, - object: match t.object { - VarOrNodeOrLiteral::Node(Node::BlankNode(n)) => VarOrNodeOrLiteral::Node( - Node::BlankNode(id_issuer.get_str_or_issue(n).to_string()), - ), - _ => t.object, - }, - }) - .collect(); - - let out = util::construct_atoms( - deps.storage, - &format, - prefixes, - construct - .into_iter() - .map(|t| (t.subject, t.predicate, t.object)) - .collect(), - r#where, - )?; - - Ok(ConstructResponse { - format, - data: Binary::from(out), - }) - } -} - -pub mod util { - use super::*; - use crate::msg::{Head, Results, SelectResponse}; - use crate::parser::{ - Prefix, Value, VarOrNamedNode, VarOrNode, VarOrNodeOrLiteral, WhereClause, - }; - use crate::querier::{PlanBuilder, QueryEngine, SelectResults}; - use crate::rdf::{Atom, PrefixMap}; - use crate::state::{HasCachedNamespaces, Namespace, NamespaceResolver}; - use axone_rdf::normalize::IdentifierIssuer; - use axone_rdf::serde::TripleWriter; - use cosmwasm_std::Storage; - use std::collections::BTreeMap; - - pub fn map_select_solutions( - deps: Deps<'_>, - res: SelectResults<'_>, - ns_cache: Vec, - ) -> StdResult { - let mut ns_solver = NamespaceResolver::new(deps.storage, ns_cache); - let mut id_issuer = 
IdentifierIssuer::new("b", 0u128); - - let mut bindings: Vec> = vec![]; - for solution in res.solutions { - let vars = solution?; - let resolved = vars - .into_iter() - .map(|(name, var)| -> StdResult<(String, Value)> { - Ok((name, var.as_value(&mut ns_solver, &mut id_issuer)?)) - }) - .collect::>>()?; - bindings.push(resolved); - } - - Ok(SelectResponse { - head: Head { vars: res.head }, - results: Results { bindings }, - }) - } - - pub fn construct_atoms( - storage: &dyn Storage, - format: &DataFormat, - prefixes: Vec, - construct: Vec<(VarOrNode, VarOrNamedNode, VarOrNodeOrLiteral)>, - r#where: WhereClause, - ) -> StdResult> { - let store = STORE.load(storage)?; - - let prefix_map = ::from(prefixes).into_inner(); - let mut plan_builder = PlanBuilder::new(storage, &prefix_map, None) - .with_limit(store.limits.max_query_limit as usize); - let plan = plan_builder.build_plan(&r#where)?; - - let atoms = QueryEngine::new(storage, plan_builder.cached_namespaces()) - .construct_atoms(plan, &prefix_map, construct)? 
- .collect::>>()?; - - let out: Vec = Vec::default(); - let mut writer = TripleWriter::new(&format.into(), out); - - for atom in &atoms { - let triple = atom.into(); - - writer.write(&triple).map_err(|e| { - StdError::serialize_err( - "triple", - format!("Error writing triple {}: {}", &triple, e), - ) - })?; - } - writer - .finish() - .map_err(|e| StdError::serialize_err("triple", format!("Error writing triple: {e}"))) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::error::StoreError; - use crate::msg::ExecuteMsg::{DeleteData, InsertData}; - use crate::msg::{ - ConstructResponse, DescribeResponse, Head, Results, SelectResponse, StoreLimitsInput, - StoreLimitsInputBuilder, StoreResponse, - }; - use crate::parser::Node::{BlankNode, NamedNode}; - use crate::parser::IRI::{Full, Prefixed}; - use crate::parser::{ConstructQuery, DescribeQuery, Prefix, SelectQuery, Value, WhereClause}; - use crate::parser::{ - Literal, SelectItem, TriplePattern, VarOrNamedNode, VarOrNamedNodeOrLiteral, VarOrNode, - VarOrNodeOrLiteral, - }; - use crate::state::{ - namespaces, triples, Namespace, Node, Object, StoreLimits, StoreStat, Subject, Triple, - }; - use crate::{msg, parser, state}; - use cosmwasm_std::testing::{message_info, mock_dependencies, mock_env}; - use cosmwasm_std::{coins, from_json, Addr, Attribute, Order, Uint128}; - use cw_utils::PaymentError; - use cw_utils::PaymentError::NonPayable; - use std::collections::BTreeMap; - use std::fs::File; - use std::io::Read; - use std::path::Path; - use std::{env, u128}; - use testing::addr::{addr, OWNER, SENDER}; - - #[test] - fn proper_initialization() { - let mut deps = mock_dependencies(); - - let msg = InstantiateMsg { - limits: StoreLimitsInput { - max_triple_count: Uint128::from(1u128), - max_byte_size: Uint128::from(2u128), - max_triple_byte_size: Uint128::from(3u128), - max_query_limit: 4, - max_query_variable_count: 5, - max_insert_data_byte_size: Uint128::from(6u128), - max_insert_data_triple_count: 
Uint128::from(7u128), - }, - }; - - let info = message_info(&addr(OWNER), &[]); - let res = instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - assert_eq!(0, res.messages.len()); - - let store = STORE.load(&deps.storage).unwrap(); - assert_eq!(store.owner, info.sender); - assert_eq!( - store.limits, - state::StoreLimits { - max_triple_count: Uint128::from(1u128), - max_byte_size: Uint128::from(2u128), - max_triple_byte_size: Uint128::from(3u128), - max_query_limit: 4, - max_query_variable_count: 5, - max_insert_data_byte_size: Uint128::from(6u128), - max_insert_data_triple_count: Uint128::from(7u128), - } - ); - assert_eq!( - store.stat, - StoreStat { - triple_count: Uint128::zero(), - namespace_count: Uint128::zero(), - byte_size: Uint128::zero(), - } - ); - - assert_eq!(NAMESPACE_KEY_INCREMENT.load(&deps.storage).unwrap(), 0u128); - assert_eq!( - BLANK_NODE_IDENTIFIER_COUNTER.load(&deps.storage).unwrap(), - 0u128 - ); - } - - #[test] - fn funds_initialization() { - let mut deps = mock_dependencies(); - let env = mock_env(); - let info = message_info(&addr(SENDER), &coins(10, "uaxone")); - - let msg = InstantiateMsg::default(); - - let result = instantiate(deps.as_mut(), env, info, msg); - assert!(result.is_err()); - assert_eq!(result.unwrap_err(), ContractError::Payment(NonPayable {})); - } - - #[test] - fn execute_fail_with_funds() { - let mut deps = mock_dependencies(); - let env = mock_env(); - let info = message_info(&addr("sender"), &coins(10, "uaxone")); - - let messages = vec![ - InsertData { - format: Some(DataFormat::RDFXml), - data: Binary::from("data".as_bytes()), - }, - DeleteData { - prefixes: vec![], - delete: vec![], - r#where: None, - }, - ]; - - for msg in messages { - let result = execute(deps.as_mut(), env.clone(), info.clone(), msg); - assert!(result.is_err()); - assert_eq!( - result.unwrap_err(), - ContractError::Payment(PaymentError::NonPayable {}) - ); - } - } - - #[test] - fn proper_insert() { - let cases = vec![ - 
InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - InsertData { - format: Some(DataFormat::Turtle), - data: read_test_data("sample.ttl"), - }, - InsertData { - format: Some(DataFormat::NTriples), - data: read_test_data("sample.nt"), - }, - InsertData { - format: Some(DataFormat::NQuads), - data: read_test_data("sample.nq"), - }, - InsertData { - format: None, - data: read_test_data("sample.ttl"), - }, - ]; - - for case in cases { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - let res = execute(deps.as_mut(), mock_env(), info.clone(), case); - - assert!(res.is_ok()); - assert_eq!( - res.unwrap().attributes, - vec![ - Attribute::new("action", "insert"), - Attribute::new("triple_count", "40"), - ] - ); - - assert_eq!( - triples() - .range_raw(&deps.storage, None, None, Order::Ascending) - .count(), - 40 - ); - assert_eq!( - STORE.load(&deps.storage).unwrap().stat, - StoreStat { - triple_count: 40u128.into(), - namespace_count: 17u128.into(), - byte_size: 7190u128.into(), - }, - ); - assert_eq!(NAMESPACE_KEY_INCREMENT.load(&deps.storage).unwrap(), 17u128); - assert_eq!( - namespaces() - .load( - &deps.storage, - "https://ontology.axone.space/dataverse/dataspace/".to_string(), - ) - .unwrap(), - Namespace { - value: "https://ontology.axone.space/dataverse/dataspace/".to_string(), - key: 0u128, - counter: 5u128, - } - ); - assert_eq!( - triples() - .load( - &deps.storage, - ( - Object::Named(Node { - namespace: 4u128, - value: "0x04d1f1b8f8a7a28f9a5a254c326a963a22f5a5b5d5f5e5d5c5b5a5958575655" - .to_string(), - }).as_hash() - .as_bytes(), - Node { - namespace: 3u128, - value: "hasRegistrar".to_string(), - } - .key(), - Subject::Named(Node { - namespace: 0u128, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - }) - .key() - ), - ) - .unwrap(), - Triple { - object: 
Object::Named(Node { - namespace: 4u128, - value: "0x04d1f1b8f8a7a28f9a5a254c326a963a22f5a5b5d5f5e5d5c5b5a5958575655" - .to_string(), - }), - predicate: Node { - namespace: 3u128, - value: "hasRegistrar".to_string(), - }, - subject: Subject::Named(Node { - namespace: 0u128, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - }), - } - ) - } - } - - #[test] - fn proper_insert_blank_nodes() { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - let insert_msg = InsertData { - format: None, - data: read_test_data("blank-nodes.ttl"), - }; - - let res = execute(deps.as_mut(), mock_env(), info.clone(), insert_msg.clone()); - assert!(res.is_ok()); - assert_eq!( - BLANK_NODE_IDENTIFIER_COUNTER.load(&deps.storage).unwrap(), - 2u128 - ); - - // we insert the same data again to check the creation of new blank nodes - let res = execute(deps.as_mut(), mock_env(), info.clone(), insert_msg); - assert!(res.is_ok()); - assert_eq!( - BLANK_NODE_IDENTIFIER_COUNTER.load(&deps.storage).unwrap(), - 4u128 - ); - } - - #[test] - fn insert_existing_triples() { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info.clone(), - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - let res = execute( - deps.as_mut(), - mock_env(), - info.clone(), - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ); - - assert!(res.is_ok()); - assert_eq!( - res.unwrap().attributes, - vec![ - Attribute::new("action", "insert"), - Attribute::new("triple_count", "0"), - ] - ); - - assert_eq!( - triples() - .range_raw(&deps.storage, None, None, Order::Ascending) - 
.count(), - 40 - ); - assert_eq!( - STORE.load(&deps.storage).unwrap().stat, - StoreStat { - triple_count: 40u128.into(), - namespace_count: 17u128.into(), - byte_size: 7190u128.into(), - }, - ); - assert_eq!(NAMESPACE_KEY_INCREMENT.load(&deps.storage).unwrap(), 17u128); - } - - #[test] - fn insert_unauthorized() { - let mut deps = mock_dependencies(); - instantiate( - deps.as_mut(), - mock_env(), - message_info(&addr(OWNER), &[]), - InstantiateMsg::default(), - ) - .unwrap(); - - let res = execute( - deps.as_mut(), - mock_env(), - message_info(&addr("not-owner"), &[]), - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ); - assert!(res.is_err()); - assert_eq!(res.err().unwrap(), ContractError::Unauthorized); - } - - #[test] - fn insert_limits() { - let cases = vec![ - ( - StoreLimitsInputBuilder::default() - .max_triple_count(30u128) - .build() - .unwrap(), - Some(ContractError::from(StoreError::TripleCount(30u128.into()))), - ), - ( - StoreLimitsInputBuilder::default() - .max_triple_count(40u128) - .build() - .unwrap(), - None, - ), - ( - StoreLimitsInputBuilder::default() - .max_byte_size(50u128) - .build() - .unwrap(), - Some(ContractError::from(StoreError::ByteSize(50u128.into()))), - ), - ( - StoreLimitsInputBuilder::default() - .max_byte_size(50000u128) - .build() - .unwrap(), - None, - ), - ( - StoreLimitsInputBuilder::default() - .max_insert_data_byte_size(500u128) - .build() - .unwrap(), - Some(ContractError::from(StoreError::InsertDataByteSize( - 500u128.into(), - ))), - ), - ( - StoreLimitsInputBuilder::default() - .max_insert_data_byte_size(50000u128) - .build() - .unwrap(), - None, - ), - ( - StoreLimitsInputBuilder::default() - .max_triple_byte_size(150u128) - .build() - .unwrap(), - Some(ContractError::from(StoreError::TripleByteSize( - 177u128.into(), - 150u128.into(), - ))), - ), - ( - StoreLimitsInputBuilder::default() - .max_triple_byte_size(400u128) - .build() - .unwrap(), - None, - ), - ( - 
StoreLimitsInputBuilder::default() - .max_insert_data_triple_count(30u128) - .build() - .unwrap(), - Some(ContractError::from(StoreError::InsertDataTripleCount( - 30u128.into(), - ))), - ), - ( - StoreLimitsInputBuilder::default() - .max_insert_data_triple_count(40u128) - .build() - .unwrap(), - None, - ), - ]; - - let exec_msg = InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }; - for case in cases { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { limits: case.0 }, - ) - .unwrap(); - - let res = execute(deps.as_mut(), mock_env(), info.clone(), exec_msg.clone()); - - if let Some(err) = case.1 { - assert!(res.is_err()); - assert_eq!(res.err().unwrap(), err); - } else { - assert!(res.is_ok()); - } - } - } - - #[test] - fn proper_delete() { - let id = "https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473"; - let cases = vec![ - ( - DeleteData { - prefixes: vec![], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/dataverse/dataspace/metadata/unknown" - .to_string(), - )), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTopic".to_string(), - )), - object: VarOrNamedNodeOrLiteral::NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - )), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full( - "https://ontology.axone.space/dataverse/dataspace/metadata/unknown" - .to_string(), - ))), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTopic".to_string(), - )), - object: VarOrNodeOrLiteral::Node(NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - ))), - }], - } - .into(), - }, - 0, - 0, - Uint128::from(7190u128), 
- ), - ( - DeleteData { - prefixes: vec![], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full(id.to_string())), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTopic".to_string(), - )), - object: VarOrNamedNodeOrLiteral::NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - )), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTopic".to_string(), - )), - object: VarOrNodeOrLiteral::Node(NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - ))), - }], - } - .into(), - }, - 1, - 0, - Uint128::from(7005u128), - ), - ( - DeleteData { - prefixes: vec![ - Prefix { - prefix: "core".to_string(), - namespace: "https://ontology.axone.space/core/".to_string(), - }, - Prefix { - prefix: "thesaurus".to_string(), - namespace: "https://ontology.axone.space/thesaurus/topic/".to_string(), - }, - ], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full(id.to_string())), - predicate: VarOrNamedNode::NamedNode(Prefixed("core:hasTopic".to_string())), - object: VarOrNamedNodeOrLiteral::NamedNode(Prefixed( - "thesaurus:Test".to_string(), - )), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTopic".to_string(), - )), - object: VarOrNodeOrLiteral::Node(NamedNode(Prefixed( - "thesaurus:Test".to_string(), - ))), - }], - } - .into(), - }, - 1, - 0, - Uint128::from(7005u128), - ), - ( - DeleteData { - prefixes: vec![ - Prefix { - prefix: "core".to_string(), - namespace: "https://ontology.axone.space/core/".to_string(), - }, - Prefix { - prefix: "thesaurus".to_string(), - namespace: 
"https://ontology.axone.space/thesaurus/topic/".to_string(), - }, - ], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full(id.to_string())), - predicate: VarOrNamedNode::NamedNode(Prefixed("core:hasTopic".to_string())), - object: VarOrNamedNodeOrLiteral::Variable("o".to_string()), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTopic".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }], - } - .into(), - }, - 1, - 0, - Uint128::from(7005u128), - ), - ( - DeleteData { - prefixes: vec![], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full(id.to_string())), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNamedNodeOrLiteral::Variable("o".to_string()), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }], - } - .into(), - }, - 11, - 2, - Uint128::from(5334u128), - ), - ( - DeleteData { - prefixes: vec![], - delete: vec![], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }], - } - .into(), - }, - 11, - 2, - Uint128::from(5334u128), - ), - ( - DeleteData { - prefixes: vec![], - delete: vec![], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Variable("s".to_string()), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("0".to_string()), - }], - } - .into(), - }, - 40, - 17, - Uint128::from(0u128), - ), - ( - DeleteData { - prefixes: vec![ - Prefix { - 
prefix: "core".to_string(), - namespace: "https://ontology.axone.space/core/".to_string(), - }, - Prefix { - prefix: "thesaurus".to_string(), - namespace: "https://ontology.axone.space/thesaurus/topic/".to_string(), - }, - ], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full(id.to_string())), - predicate: VarOrNamedNode::NamedNode(Prefixed("core:hasTopic".to_string())), - object: VarOrNamedNodeOrLiteral::NamedNode(Prefixed( - "thesaurus:Test".to_string(), - )), - }], - r#where: None, - }, - 1, - 0, - Uint128::from(7005u128), - ), - ]; - - for case in cases { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info.clone(), - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - let res = execute(deps.as_mut(), mock_env(), info, case.0); - - assert!(res.is_ok()); - assert_eq!( - res.unwrap().attributes, - vec![ - Attribute::new("action", "delete"), - Attribute::new("triple_count", case.1.to_string()), - ] - ); - - assert_eq!( - STORE.load(&deps.storage).unwrap().stat, - StoreStat { - triple_count: (40u128 - u128::try_from(case.1).unwrap()).into(), - namespace_count: (17u128 - u128::try_from(case.2).unwrap()).into(), - byte_size: case.3, - }, - ); - assert_eq!( - triples() - .range_raw(&deps.storage, None, None, Order::Ascending) - .count(), - 40 - case.1 - ); - assert_eq!( - namespaces() - .range_raw(&deps.storage, None, None, Order::Ascending) - .count(), - 17 - case.2 - ); - } - } - - #[test] - fn invalid_delete() { - struct TC { - command: ExecuteMsg, - expected: ContractError, - } - let cases = vec![ - TC { - command: DeleteData { - prefixes: vec![], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Prefixed("foo:bar".to_string())), - predicate: 
VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTopic".to_string(), - )), - object: VarOrNamedNodeOrLiteral::NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - )), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Prefixed("foo:bar".to_string()))), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTopic".to_string(), - )), - object: VarOrNodeOrLiteral::Node(NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - ))), - }], - } - .into(), - }, - expected: StdError::generic_err("Prefix not found: foo").into(), - }, - TC { - command: DeleteData { - prefixes: vec![], - delete: vec![parser::TripleDeleteTemplate { - subject: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - )), - predicate: VarOrNamedNode::Variable("z".to_string()), - object: VarOrNamedNodeOrLiteral::Variable("o".to_string()), - }], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full( - "https://ontology.axone.space/thesaurus/topic/Test".to_string(), - ))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }], - } - .into(), - }, - expected: StdError::generic_err("Selected variable not found in query").into(), - }, - ]; - - for case in cases { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info.clone(), - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - let res = execute(deps.as_mut(), mock_env(), info, case.command); - - assert!(res.is_err()); - assert_eq!(res.unwrap_err(), case.expected); - } - } - - 
#[test] - fn proper_store() { - let mut deps = mock_dependencies(); - STORE - .save( - deps.as_mut().storage, - &Store { - owner: Addr::unchecked(OWNER), - limits: StoreLimits { - max_triple_count: 1u128.into(), - max_byte_size: 2u128.into(), - max_triple_byte_size: 3u128.into(), - max_query_limit: 4u32, - max_query_variable_count: 5u32, - max_insert_data_byte_size: 6u128.into(), - max_insert_data_triple_count: 7u128.into(), - }, - stat: StoreStat { - triple_count: 1u128.into(), - namespace_count: 2u128.into(), - byte_size: 3u128.into(), - }, - }, - ) - .unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Store {}); - assert!(res.is_ok()); - assert_eq!( - from_json::(&res.unwrap()).unwrap(), - StoreResponse { - owner: OWNER.to_string(), - limits: msg::StoreLimits { - max_triple_count: 1u128.into(), - max_byte_size: 2u128.into(), - max_triple_byte_size: 3u128.into(), - max_query_limit: 4u32, - max_query_variable_count: 5u32, - max_insert_data_byte_size: 6u128.into(), - max_insert_data_triple_count: 7u128.into(), - }, - stat: msg::StoreStat { - triple_count: 1u128.into(), - namespace_count: 2u128.into(), - byte_size: 3u128.into(), - }, - } - ); - } - - fn read_test_data(file: &str) -> Binary { - let mut bytes: Vec = Vec::new(); - - File::open( - Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()) - .join("testdata") - .join(file), - ) - .unwrap() - .read_to_end(&mut bytes) - .unwrap(); - - Binary::from(bytes) - } - - #[test] - fn proper_select() { - let cases = vec![ - ( - SelectQuery { - prefixes: vec![], - select: vec![ - SelectItem::Variable("a".to_string()), - SelectItem::Variable("b".to_string()), - ], - r#where: WhereClause::Bgp{patterns:vec![TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasDescription".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("b".to_string()), - }, - ]}, - limit: None, - }, - SelectResponse { - head: Head { - 
vars: vec!["a".to_string(), "b".to_string()], - }, - results: Results { - bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "A test Data Space.".to_string(), - lang: Some("en".to_string()), - datatype: None, - } - ) - ]), - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "Un Dataset de test.".to_string(), - lang: Some("fr".to_string()), - datatype: None, - } - ) - ]), - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "A test Dataset.".to_string(), - lang: Some("en".to_string()), - datatype: None, - } - ) - ]), - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "Un Data Space de test.".to_string(), - lang: Some("fr".to_string()), - datatype: None, - } - ) - ]), - ], - }, - }, - ), - ( - SelectQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - select: vec![ - SelectItem::Variable("a".to_string()), - ], - r#where: WhereClause::Bgp{patterns:vec![TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasDescription".to_string(), - )), - object: VarOrNodeOrLiteral::Literal(Literal::LanguageTaggedString { value: "A test Dataset.".to_string(), language: "en".to_string() 
}), - }, - ]}, - limit: None, - }, - SelectResponse { - head: Head { - vars: vec!["a".to_string()], - }, - results: Results { - bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string()) - } - ), - ]) - ], - }, - }, - ), - ( - SelectQuery { - prefixes: vec![], - select: vec![ - SelectItem::Variable("a".to_string()), - ], - r#where: WhereClause::Bgp{patterns:vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full("https://ontology.axone.space/dataverse/dataset/metadata/d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string()))), - predicate: VarOrNamedNode::Variable("a".to_string()), - object: VarOrNodeOrLiteral::Literal(Literal::LanguageTaggedString { value: "A test Dataset.".to_string(), language: "en".to_string() }), - }, - ]}, - limit: None, - }, - SelectResponse { - head: Head { - vars: vec!["a".to_string()], - }, - results: Results { - bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/core/hasDescription".to_string()) - } - ), - ]) - ], - }, - }, - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), QueryMsg::Select { query: q }); - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - assert_eq!(result, expected); - } - } - - #[test] - fn proper_select_blank_nodes() { - let cases = vec![ - ( - SelectQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - select: 
vec![SelectItem::Variable("a".to_string()), SelectItem::Variable("b".to_string())], - r#where: WhereClause::Bgp{patterns:vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTemporalCoverage".to_string(), - )), - object: VarOrNodeOrLiteral::Node(BlankNode("a".to_string())), - }, - TriplePattern { - subject: VarOrNode::Node(BlankNode("a".to_string())), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasStartDate".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("b".to_string()), - }, - ]}, - limit: None, - }, - SelectResponse { - head: Head { vars: vec!["a".to_string(), "b".to_string()] }, - results: Results { - bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "2022-01-01T00:00:00+00:00".to_string(), - lang: None, - datatype: Some(Full("http://www.w3.org/2001/XMLSchema#dateTime".to_string())), - } - ) - ]) - ], - }, - }, - ), - ( - SelectQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - select: vec![SelectItem::Variable("a".to_string()), SelectItem::Variable("b".to_string())], - r#where: WhereClause::Bgp{patterns:vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTemporalCoverage".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("blank".to_string()), - }, - TriplePattern { - subject: VarOrNode::Variable("blank".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasStartDate".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("b".to_string()), - } - ]}, - limit: None, - }, - SelectResponse { - head: Head { vars: vec!["a".to_string(), "b".to_string()] }, - results: Results { - 
bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "2022-01-01T00:00:00+00:00".to_string(), - lang: None, - datatype: Some(Full("http://www.w3.org/2001/XMLSchema#dateTime".to_string())), - } - ) - ]) - ], - }, - }, - ), - ( - SelectQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - select: vec![SelectItem::Variable("a".to_string()), SelectItem::Variable("b".to_string())], - r#where: WhereClause::Bgp{patterns:vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTemporalCoverage".to_string(), - )), - object: VarOrNodeOrLiteral::Node(BlankNode("blank1".to_string())), - }, - TriplePattern { - subject: VarOrNode::Node(BlankNode("blank2".to_string())), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasInformation".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("b".to_string()), - }, - ]}, - limit: None, - }, - SelectResponse { - head: Head { vars: vec!["a".to_string(), "b".to_string()] }, - results: Results { - bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()) - } - ), - ( - "b".to_string(), - Value::Literal { - value: "this is a dataset".to_string(), - lang: None, - datatype: None, - } - ) - ]) - ], - }, - }, - ), - ( - SelectQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - select: vec![SelectItem::Variable("a".to_string()), SelectItem::Variable("b".to_string())], - r#where: WhereClause::Bgp{patterns:vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - 
predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTemporalCoverage".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("b".to_string()), - }, - ]}, - limit: None, - }, - SelectResponse { - head: Head { vars: vec!["a".to_string(), "b".to_string()] }, - results: Results { - bindings: vec![ - BTreeMap::from([ - ( - "a".to_string(), - Value::URI { - value: Full("https://ontology.axone.space/dataverse/dataset/metadata/80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()) - } - ), - ( - "b".to_string(), - Value::BlankNode { - value: "b0".to_string(), - } - ) - ]) - ], - }, - }, - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::Turtle), - data: read_test_data("blank-nodes.ttl"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), QueryMsg::Select { query: q }); - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - assert_eq!(result, expected); - } - } - - #[test] - fn invalid_select() { - let cases = vec![ - ( - SelectQuery { - prefixes: vec![], - select: vec![ - SelectItem::Variable("a".to_string()), - SelectItem::Variable("b".to_string()), - ], - r#where: WhereClause::Bgp { patterns: vec![] }, - limit: None, - }, - Err(StdError::generic_err( - "Maximum query variable count exceeded", - )), - ), - ( - SelectQuery { - prefixes: vec![], - select: vec![], - r#where: WhereClause::Bgp { patterns: vec![] }, - limit: Some(8000), - }, - Err(StdError::generic_err("Maximum query limit exceeded")), - ), - ( - SelectQuery { - prefixes: vec![Prefix { - prefix: "core".to_string(), - namespace: "https://ontology.axone.space/core/".to_string(), - }], - select: vec![SelectItem::Variable("a".to_string())], - r#where: WhereClause::Bgp { - patterns: 
vec![TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "invalid:hasDescription".to_string(), - )), - object: VarOrNodeOrLiteral::Literal(Literal::LanguageTaggedString { - value: "A test Dataset.".to_string(), - language: "en".to_string(), - }), - }], - }, - limit: None, - }, - Err(StdError::generic_err("Prefix not found: invalid")), - ), - ( - SelectQuery { - prefixes: vec![], - select: vec![SelectItem::Variable("u".to_string())], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasDescription".to_string(), - )), - object: VarOrNodeOrLiteral::Literal(Literal::LanguageTaggedString { - value: "A test Dataset.".to_string(), - language: "en".to_string(), - }), - }], - }, - limit: None, - }, - Err(StdError::generic_err( - "Selected variable not found in query", - )), - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { - limits: StoreLimitsInput { - max_query_variable_count: 1, - ..Default::default() - }, - }, - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), QueryMsg::Select { query: q }); - assert_eq!(res, expected); - } - } - - #[test] - fn formats_describe() { - let cases = vec![ - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![], - resource: VarOrNamedNode::NamedNode(Full("https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473".to_string())), - r#where: None, - }, - format: Some(DataFormat::Turtle), - }, - DescribeResponse { - format: DataFormat::Turtle, - data: 
Binary::from( - " , ; -\t \"Test\" , \"AXONE\" ; -\t \"Data Space de test\"@fr , \"Test Data Space\"@en ; -\t ; -\t ; -\t \"AXONE\" ; -\t \"A test Data Space.\"@en , \"Un Data Space de test.\"@fr . -\ - ".to_string().as_bytes().to_vec()), - } - ), - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![], - resource: VarOrNamedNode::NamedNode(Full("https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473".to_string())), - r#where: None, - }, - format: Some(DataFormat::RDFXml), - }, - DescribeResponse { - format: DataFormat::RDFXml, - data: Binary::from( - "\ -\ -\ -\ -TestAXONE\ -Data Space de test\ -Test Data Space\ -\ -\ -AXONE\ -A test Data Space.\ -Un Data Space de test.\ -\ -\ - ".to_string().as_bytes().to_vec()), - } - ), - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![], - resource: VarOrNamedNode::NamedNode(Full("https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473".to_string())), - r#where: WhereClause::Bgp { patterns: vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasDescription".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("b".to_string()), - }, - ]}.into(), - }, - format: Some(DataFormat::NTriples), - }, - DescribeResponse { - format: DataFormat::NTriples, - data: Binary::from( - " . - . - \"Test\" . - \"AXONE\" . - \"Data Space de test\"@fr . - \"Test Data Space\"@en . - . - . - \"AXONE\" . - \"A test Data Space.\"@en . - \"Un Data Space de test.\"@fr . 
-\ - ".to_string().as_bytes().to_vec()), - } - ), - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![], - resource: VarOrNamedNode::NamedNode(Full("https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473".to_string())), - r#where: None, - }, - format: Some(DataFormat::NQuads), - }, - DescribeResponse { - format: DataFormat::NQuads, - data: Binary::from( - " . - . - \"Test\" . - \"AXONE\" . - \"Data Space de test\"@fr . - \"Test Data Space\"@en . - . - . - \"AXONE\" . - \"A test Data Space.\"@en . - \"Un Data Space de test.\"@fr . -\ - ".to_string().as_bytes().to_vec()), - } - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), q); - - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - - assert_eq!(result.format, expected.format); - assert_eq!( - String::from_utf8_lossy(&result.data), - String::from_utf8_lossy(&expected.data) - ); - } - } - - #[test] - fn prefixes_describe() { - let cases = vec![ - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![ - Prefix { - prefix: "metadata".to_string(), - namespace: "https://ontology.axone.space/dataverse/dataspace/metadata/".to_string(), - }, - ], - resource: VarOrNamedNode::NamedNode(Prefixed("metadata:dcf48417-01c5-4b43-9bc7-49e54c028473".to_string())), - r#where: None, - }, - format: Some(DataFormat::Turtle), - }, - DescribeResponse { - format: DataFormat::Turtle, - data: Binary::from( - " , ; -\t \"Test\" , \"AXONE\" ; -\t \"Data Space de test\"@fr , \"Test Data Space\"@en ; -\t ; -\t ; -\t \"AXONE\" ; -\t \"A test Data Space.\"@en , 
\"Un Data Space de test.\"@fr . -\ - ".to_string().as_bytes().to_vec()), - } - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), q); - - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - - assert_eq!(result.format, expected.format); - assert_eq!( - String::from_utf8_lossy(&result.data), - String::from_utf8_lossy(&expected.data) - ); - } - } - - #[test] - fn variable_describe() { - let cases = vec![ - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - resource: VarOrNamedNode::Variable("a".to_string()), - r#where: WhereClause::Bgp {patterns: vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasDescription".to_string(), - )), - object: VarOrNodeOrLiteral::Literal(Literal::LanguageTaggedString { value: "A test Dataset.".to_string(), language: "en".to_string() }), - }, - ]}.into(), - }, - format: Some(DataFormat::Turtle), - }, - DescribeResponse { - format: DataFormat::Turtle, - data: Binary::from( - " , ;\n\t \"test\" ;\n\t \"test Dataset\"@en , \"Dataset de test\"@fr ;\n\t ;\n\t ;\n\t ;\n\t \"Me\" ;\n\t ;\n\t \"AXONE\" ;\n\t \"Un Dataset de test.\"@fr , \"A test Dataset.\"@en .\n".to_string().as_bytes().to_vec()), - } - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - 
mock_env(), - info, - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), q); - - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - - assert_eq!(result.format, expected.format); - assert_eq!( - String::from_utf8_lossy(&result.data), - String::from_utf8_lossy(&expected.data) - ); - } - } - - #[test] - fn variable_multiple_resources_describe() { - let cases = vec![ - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }], - resource: VarOrNamedNode::Variable("a".to_string()), - r#where: WhereClause::Bgp {patterns: vec![ - TriplePattern { - subject: VarOrNode::Variable("a".to_string()), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasPublisher".to_string(), - )), - object: VarOrNodeOrLiteral::Literal(Literal::Simple("AXONE".to_string())), - }, - ]}.into(), - }, - format: Some(DataFormat::Turtle), - }, - DescribeResponse { - format: DataFormat::Turtle, - data: Binary::from( - " , ;\n\t \"Test\" , \"AXONE\" ;\n\t \"Data Space de test\"@fr , \"Test Data Space\"@en ;\n\t ;\n\t ;\n\t \"AXONE\" ;\n\t \"A test Data Space.\"@en , \"Un Data Space de test.\"@fr .\n , ;\n\t \"test\" ;\n\t \"test Dataset\"@en , \"Dataset de test\"@fr ;\n\t ;\n\t ;\n\t ;\n\t \"Me\" ;\n\t ;\n\t \"AXONE\" ;\n\t \"Un Dataset de test.\"@fr , \"A test Dataset.\"@en .\n".to_string().as_bytes().to_vec()), - } - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = 
query(deps.as_ref(), mock_env(), q); - - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - - assert_eq!(result.format, expected.format); - assert_eq!( - String::from_utf8_lossy(&result.data), - String::from_utf8_lossy(&expected.data) - ); - } - } - - #[test] - fn blanknode_describe() { - let cases = vec![ - ( - QueryMsg::Describe { - query: DescribeQuery { - prefixes: vec![ - Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }, - Prefix { prefix: "metadata-dataset".to_string(), namespace: "https://ontology.axone.space/dataverse/dataset/metadata/".to_string() }, - ], - resource: VarOrNamedNode::Variable("x".to_string()), - r#where: WhereClause::Bgp {patterns: vec![ - TriplePattern { - subject: VarOrNode::Node(NamedNode(Prefixed("metadata-dataset:80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()))), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTemporalCoverage".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("x".to_string()), - }, - ]}.into(), - }, - format: Some(DataFormat::Turtle), - }, - DescribeResponse { - format: DataFormat::Turtle, - data: Binary::from( - " , ;\n\t \"2022-01-01T00:00:00+00:00\"^^ .\n".to_string().as_bytes().to_vec()), - } - ), - ]; - - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg::default(), - ) - .unwrap(); - - execute( - deps.as_mut(), - mock_env(), - info, - InsertData { - format: Some(DataFormat::Turtle), - data: read_test_data("blank-nodes.ttl"), - }, - ) - .unwrap(); - - for (q, expected) in cases { - let res = query(deps.as_ref(), mock_env(), q); - - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - - assert_eq!(result.format, expected.format); - assert_eq!( - String::from_utf8_lossy(&result.data), - String::from_utf8_lossy(&expected.data) - ); - } - } - - #[test] - fn proper_construct() { - let 
id = "https://ontology.axone.space/dataverse/dataspace/metadata/dcf48417-01c5-4b43-9bc7-49e54c028473"; - let cases = vec![ - ( - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - QueryMsg::Construct { - query: ConstructQuery { - prefixes: vec![], - construct: vec![], - r#where: WhereClause::Bgp{patterns:vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTag".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }]}, - }, - format: None, - }, - ConstructResponse { - format: DataFormat::Turtle, - data: Binary::from( - " \"Test\" , \"AXONE\" .\n".to_string().as_bytes().to_vec()), - }, - ), - ( - InsertData { - format: Some(DataFormat::RDFXml), - data: read_test_data("sample.rdf.xml"), - }, - QueryMsg::Construct { - query: ConstructQuery { - prefixes: vec![ - Prefix { prefix: "my-ns".to_string(), namespace: "https://my-ns.org/".to_string() }, - Prefix { prefix: "metadata-dataset".to_string(), namespace: "https://ontology.axone.space/dataverse/dataset/metadata/".to_string() }, - ], - construct: vec![ - parser::TripleConstructTemplate { - subject: VarOrNode::Node(NamedNode(Prefixed("my-ns:instance-1".to_string()))), - predicate: VarOrNamedNode::NamedNode(Full( - "https://my-ns/predicate/tag".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - } - ], - r#where: WhereClause::Bgp{patterns:vec![TriplePattern { - subject: VarOrNode::Node(NamedNode(Full(id.to_string()))), - predicate: VarOrNamedNode::NamedNode(Full( - "https://ontology.axone.space/core/hasTag".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }]}, - }, - format: Some(DataFormat::NTriples), - }, - ConstructResponse { - format: DataFormat::NTriples, - data: Binary::from( - " \"Test\" .\n \"AXONE\" .\n".to_string().as_bytes().to_vec()), - }, - ), - ( - InsertData { - format: 
Some(DataFormat::Turtle), - data: read_test_data("blank-nodes.ttl"), - }, - QueryMsg::Construct { - query: ConstructQuery { - prefixes: vec![ - Prefix { prefix: "core".to_string(), namespace: "https://ontology.axone.space/core/".to_string() }, - Prefix { prefix: "metadata-dataset".to_string(), namespace: "https://ontology.axone.space/dataverse/dataset/metadata/".to_string() }, - ], - construct: vec![ - parser::TripleConstructTemplate { - subject: VarOrNode::Node(BlankNode("my-metadata".to_string())), - predicate: VarOrNamedNode::NamedNode(Full( - "https://my-ns/predicate/tcov".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("tcov".to_string()), - }, - parser::TripleConstructTemplate { - subject: VarOrNode::Node(BlankNode("my-metadata".to_string())), - predicate: VarOrNamedNode::NamedNode(Full( - "https://my-ns/predicate/info".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("info".to_string()), - }, - parser::TripleConstructTemplate { - subject: VarOrNode::Variable("tcov".to_string()), - predicate: VarOrNamedNode::Variable("tcov_p".to_string()), - object: VarOrNodeOrLiteral::Variable("tcov_o".to_string()), - }, - parser::TripleConstructTemplate { - subject: VarOrNode::Variable("info".to_string()), - predicate: VarOrNamedNode::Variable("info_p".to_string()), - object: VarOrNodeOrLiteral::Variable("info_o".to_string()), - } - ], - r#where: WhereClause::Bgp {patterns:vec![ - TriplePattern { - subject: VarOrNode::Node(NamedNode(Prefixed("metadata-dataset:80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()))), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasTemporalCoverage".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("tcov".to_string()), - }, - TriplePattern { - subject: VarOrNode::Node(NamedNode(Prefixed("metadata-dataset:80b1f84e-86dc-4730-b54f-701ad9b1888a".to_string()))), - predicate: VarOrNamedNode::NamedNode(Prefixed( - "core:hasInformations".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("info".to_string()), 
- }, - TriplePattern { - subject: VarOrNode::Variable("tcov".to_string()), - predicate: VarOrNamedNode::Variable("tcov_p".to_string()), - object: VarOrNodeOrLiteral::Variable("tcov_o".to_string()), - }, - TriplePattern { - subject: VarOrNode::Variable("info".to_string()), - predicate: VarOrNamedNode::Variable("info_p".to_string()), - object: VarOrNodeOrLiteral::Variable("info_o".to_string()), - } - ]}, - }, - format: Some(DataFormat::NTriples), - }, - ConstructResponse { - format: DataFormat::NTriples, - data: Binary::from( - " .\n .\n .\n \"this is a dataset\" .\n .\n .\n .\n \"this is a dataset\" .\n .\n .\n \"2022-01-01T00:00:00+00:00\"^^ .\n \"this is a dataset\" .\n".to_string().as_bytes().to_vec()), - }, - ), - ]; - - for (data, q, expected) in cases { - let mut deps = mock_dependencies(); - - let info = message_info(&addr(OWNER), &[]); - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { - limits: StoreLimitsInput::default(), - }, - ) - .unwrap(); - - execute(deps.as_mut(), mock_env(), info.clone(), data).unwrap(); - - let res = query(deps.as_ref(), mock_env(), q); - - assert!(res.is_ok()); - - let result = from_json::(&res.unwrap()).unwrap(); - - assert_eq!(result.format, expected.format); - assert_eq!( - String::from_utf8_lossy(&result.data), - String::from_utf8_lossy(&expected.data) - ); - } - } -} diff --git a/contracts/axone-cognitarium/src/error.rs b/contracts/axone-cognitarium/src/error.rs deleted file mode 100644 index 16b302a3..00000000 --- a/contracts/axone-cognitarium/src/error.rs +++ /dev/null @@ -1,77 +0,0 @@ -use cosmwasm_std::{StdError, Uint128}; -use cw_utils::PaymentError; -use rio_turtle::TurtleError; -use rio_xml::RdfXmlError; -use thiserror::Error; - -#[derive(Debug, Error, PartialEq)] -pub enum ContractError { - #[error("{0}")] - Std(#[from] StdError), - - #[error("{0}")] - ParseRDF(#[from] RDFParseError), - - #[error("{0}")] - FormatRDF(String), - - #[error("{0}")] - Store(#[from] StoreError), - - 
#[error("Only the owner can perform this operation.")] - Unauthorized, - - #[error("{0}")] - Payment(#[from] PaymentError), -} - -impl From for ContractError { - fn from(value: RdfXmlError) -> Self { - RDFParseError::from(value).into() - } -} - -impl From for ContractError { - fn from(value: TurtleError) -> Self { - RDFParseError::from(value).into() - } -} - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum StoreError { - #[error("Maximum triples number exceeded: {0}")] - TripleCount(Uint128), - - #[error("Maximum byte size exceeded: {0}")] - ByteSize(Uint128), - - #[error("Maximum triple byte size exceeded: {0} / {1}")] - TripleByteSize(Uint128, Uint128), - - #[error("Maximum insert byte size exceeded: {0}")] - InsertDataByteSize(Uint128), - - #[error("Maximum insert triple count exceeded: {0}")] - InsertDataTripleCount(Uint128), -} - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum RDFParseError { - #[error("Error parsing XML RDF: {0}")] - Xml(String), - - #[error("Error parsing Turtle RDF: {0}")] - Turtle(String), -} - -impl From for RDFParseError { - fn from(value: RdfXmlError) -> Self { - RDFParseError::Xml(value.to_string()) - } -} - -impl From for RDFParseError { - fn from(value: TurtleError) -> Self { - RDFParseError::Xml(value.to_string()) - } -} diff --git a/contracts/axone-cognitarium/src/lib.rs b/contracts/axone-cognitarium/src/lib.rs deleted file mode 100644 index 14419671..00000000 --- a/contracts/axone-cognitarium/src/lib.rs +++ /dev/null @@ -1,10 +0,0 @@ -pub mod contract; -mod error; -pub mod msg; -pub mod parser; -mod querier; -mod rdf; -pub mod state; -mod storer; - -pub use crate::error::ContractError; diff --git a/contracts/axone-cognitarium/src/msg.rs b/contracts/axone-cognitarium/src/msg.rs deleted file mode 100644 index 354be5e3..00000000 --- a/contracts/axone-cognitarium/src/msg.rs +++ /dev/null @@ -1,392 +0,0 @@ -use crate::parser::{ - ConstructQuery, DescribeQuery, Prefix, SelectQuery, TripleDeleteTemplate, Value, WhereClause, -}; -use 
cosmwasm_schema::{cw_serde, QueryResponses}; -use cosmwasm_std::{Binary, Uint128}; -use derive_builder::Builder; -use std::collections::BTreeMap; - -/// Instantiate message -#[cw_serde] -#[derive(Default)] -pub struct InstantiateMsg { - /// Limitations regarding store usage. - #[serde(default)] - pub limits: StoreLimitsInput, -} - -/// Execute messages -#[cw_serde] -pub enum ExecuteMsg { - /// # InsertData - /// Insert the data as RDF triples in the store. - /// For already existing triples it acts as no-op. - /// - /// Only the smart contract owner (i.e. the address who instantiated it) is authorized to perform - /// this action. - InsertData { - /// The data format in which the triples are serialized. - /// If not provided, the default format is [Turtle](https://www.w3.org/TR/turtle/) format. - format: Option, - /// The data to insert. - /// The data must be serialized in the format specified by the `format` field. And the data - /// are subject to the limitations defined by the `limits` specified at contract instantiation. - data: Binary, - }, - - /// # DeleteData - /// Delete the data (RDF triples) from the store matching the patterns defined by the provided - /// query. For non-existing triples it acts as no-op. - /// - /// Example: - /// ```json - /// { - /// "prefixes": [ - /// { "prefix": "foaf", "namespace": "http://xmlns.com/foaf/0.1/" } - /// ], - /// "delete": [ - /// { - /// "subject": { "variable": "s" }, - /// "predicate": { "variable": "p" }, - /// "object": { "variable": "o" } - /// } - /// ], - /// "where": [ - /// { "simple": { "triplePattern": { - /// "subject": { "variable": "s" }, - /// "predicate": { "namedNode": {"prefixed": "foaf:givenName"} }, - /// "object": { "literal": { "simple": "Myrddin" } } - /// } } }, - /// { "simple": { "triplePattern": { - /// "subject": { "variable": "s" }, - /// "predicate": { "variable": "p" }, - /// "object": { "variable": "o" } - /// } } } - /// ] - /// ``` - /// - /// Only the smart contract owner (i.e. 
the address who instantiated it) is authorized to perform - /// this action. - DeleteData { - /// The prefixes used in the operation. - prefixes: Vec, - /// Specifies the specific triple templates to delete. - /// If nothing is provided and the `where` clause is a single Bgp, the patterns are used for - /// deletion. - delete: Vec, - /// Defines the patterns that data (RDF triples) should match in order for it to be - /// considered for deletion, if any. - r#where: Option, - }, -} - -/// # SelectQuery -/// Query messages -#[cw_serde] -#[derive(QueryResponses)] -pub enum QueryMsg { - /// # Store - /// - /// Returns information about the triple store. - #[returns(StoreResponse)] - Store {}, - - /// # Select - /// - /// Returns the resources matching the criteria defined by the provided query. - /// - #[returns(SelectResponse)] - Select { - /// The query to execute. - query: SelectQuery, - }, - - /// # Describe - /// - /// Returns a description of the resource identified by the provided IRI as a set of RDF triples - /// serialized in the provided format. - #[returns(DescribeResponse)] - Describe { - /// The query to execute. - query: DescribeQuery, - /// The format in which the triples are serialized. - /// If not provided, the default format is [Turtle](https://www.w3.org/TR/turtle/) format. - format: Option, - }, - - /// # Construct - /// - /// Returns the resources matching the criteria defined by the provided query as a set of RDF - /// triples serialized in the provided format. - #[returns(ConstructResponse)] - Construct { - /// The query to execute. - query: ConstructQuery, - /// The format in which the triples are serialized. - /// If not provided, the default format is [Turtle](https://www.w3.org/TR/turtle/) format. - format: Option, - }, -} - -/// # DataFormat -/// Represents the format in which the data are serialized, for example when returned by a query or -/// when inserted in the store. 
-#[cw_serde] -#[derive(Default)] -pub enum DataFormat { - /// # RDF XML - /// Output in [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) format. - #[serde(rename = "rdf_xml")] - RDFXml, - /// # Turtle - /// Output in [Turtle](https://www.w3.org/TR/turtle/) format. - #[serde(rename = "turtle")] - #[default] - Turtle, - /// # N-Triples - /// Output in [N-Triples](https://www.w3.org/TR/n-triples/) format. - #[serde(rename = "n_triples")] - NTriples, - /// # N-Quads - /// Output in [N-Quads](https://www.w3.org/TR/n-quads/) format. - #[serde(rename = "n_quads")] - NQuads, -} - -impl From<&DataFormat> for axone_rdf::serde::DataFormat { - fn from(value: &DataFormat) -> Self { - match value { - DataFormat::RDFXml => Self::RDFXml, - DataFormat::Turtle => Self::Turtle, - DataFormat::NTriples => Self::NTriples, - DataFormat::NQuads => Self::NQuads, - } - } -} - -/// # StoreLimitsInput -/// Contains requested limitations regarding store usages. -#[cw_serde] -#[derive(Builder)] -#[builder(default, setter(into, strip_option))] -pub struct StoreLimitsInput { - /// The maximum number of triples the store can contain. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - #[serde(default = "StoreLimitsInput::default_max_triple_count")] - pub max_triple_count: Uint128, - /// The maximum number of bytes the store can contain. - /// The size of a triple is counted as the sum of the size of its subject, predicate and object, - /// including the size of data types and language tags if any. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - #[serde(default = "StoreLimitsInput::default_max_byte_size")] - pub max_byte_size: Uint128, - /// The maximum number of bytes the store can contain for a single triple. - /// The size of a triple is counted as the sum of the size of its subject, predicate and object, - /// including the size of data types and language tags if any. 
The limit is used to prevent - /// storing very large triples, especially literals. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - #[serde(default = "StoreLimitsInput::default_max_triple_byte_size")] - pub max_triple_byte_size: Uint128, - /// The maximum limit of a query, i.e. the maximum number of triples returned by a select query. - /// Default to 30 if not set. - #[serde(default = "StoreLimitsInput::default_max_query_limit")] - pub max_query_limit: u32, - /// The maximum number of variables a query can select. - /// Default to 30 if not set. - #[serde(default = "StoreLimitsInput::default_max_query_variable_count")] - pub max_query_variable_count: u32, - /// The maximum number of bytes an insert data query can contain. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - #[serde(default = "StoreLimitsInput::default_max_insert_data_byte_size")] - pub max_insert_data_byte_size: Uint128, - /// The maximum number of triples an insert data query can contain (after parsing). - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. 
- #[serde(default = "StoreLimitsInput::default_max_insert_data_triple_count")] - pub max_insert_data_triple_count: Uint128, -} - -impl StoreLimitsInput { - const fn default_max_query_limit() -> u32 { - 30 - } - const fn default_max_query_variable_count() -> u32 { - 30 - } - const fn default_max_triple_count() -> Uint128 { - Uint128::MAX - } - const fn default_max_byte_size() -> Uint128 { - Uint128::MAX - } - const fn default_max_triple_byte_size() -> Uint128 { - Uint128::MAX - } - const fn default_max_insert_data_byte_size() -> Uint128 { - Uint128::MAX - } - const fn default_max_insert_data_triple_count() -> Uint128 { - Uint128::MAX - } -} - -impl Default for StoreLimitsInput { - fn default() -> Self { - Self { - max_triple_count: Self::default_max_triple_count(), - max_byte_size: Self::default_max_byte_size(), - max_triple_byte_size: Self::default_max_triple_byte_size(), - max_query_limit: Self::default_max_query_limit(), - max_query_variable_count: Self::default_max_query_variable_count(), - max_insert_data_byte_size: Self::default_max_insert_data_byte_size(), - max_insert_data_triple_count: Self::default_max_insert_data_triple_count(), - } - } -} - -/// # StoreResponse -/// -/// Contains information related to triple store. -#[cw_serde] -pub struct StoreResponse { - /// The store owner. - pub owner: String, - - /// The store limits. - pub limits: StoreLimits, - - /// The store current usage. - pub stat: StoreStat, -} - -/// # StoreLimits -/// Contains limitations regarding store usages. -#[cw_serde] -#[derive(Builder, Default)] -#[builder(default, setter(into, strip_option))] -pub struct StoreLimits { - /// The maximum number of triples the store can contain. - pub max_triple_count: Uint128, - - /// The maximum number of bytes the store can contain. - /// The size of a triple is counted as the sum of the size of its subject, predicate and object, - /// including the size of data types and language tags if any. 
- pub max_byte_size: Uint128, - - /// The maximum number of bytes the store can contain for a single triple. - /// The size of a triple is counted as the sum of the size of its subject, predicate and object, - /// including the size of data types and language tags if any. The limit is used to prevent - /// storing very large triples, especially literals. - pub max_triple_byte_size: Uint128, - - /// The maximum limit of a query, i.e. the maximum number of triples returned by a select query. - pub max_query_limit: u32, - - /// The maximum number of variables a query can select. - pub max_query_variable_count: u32, - - /// The maximum number of bytes an insert data query can contain. - pub max_insert_data_byte_size: Uint128, - - /// The maximum number of triples an insert data query can contain (after parsing). - pub max_insert_data_triple_count: Uint128, -} - -/// # StoreStat -/// -/// Contains usage information about the triple store. -#[cw_serde] -pub struct StoreStat { - /// The total number of triple present in the store. - pub triple_count: Uint128, - - /// The total number of IRI namespace present in the store. - pub namespace_count: Uint128, - - /// The total triple size in the store, in bytes. - pub byte_size: Uint128, -} - -/// # SelectResponse -/// Represents the response of a [QueryMsg::Select] query. -#[cw_serde] -pub struct SelectResponse { - /// The head of the response, i.e. the set of variables mentioned in the results. - pub head: Head, - /// The results of the select query. - pub results: Results, -} - -/// # DescribeResponse -/// Represents the response of a [QueryMsg::Describe] query. -#[cw_serde] -pub struct DescribeResponse { - /// The format of the data. - pub format: DataFormat, - /// The data serialized in the specified format. - pub data: Binary, -} - -/// # ConstructResponse -/// Represents the response of a [QueryMsg::Construct] query. -#[cw_serde] -pub struct ConstructResponse { - /// The format of the data. 
- pub format: DataFormat, - /// The data serialized in the specified format. - pub data: Binary, -} - -/// # Head -/// Represents the head of a [SelectResponse]. -#[cw_serde] -pub struct Head { - /// The variables selected in the query. - pub vars: Vec, -} - -/// # Results -/// Represents the results of a [SelectResponse]. -#[cw_serde] -pub struct Results { - /// The bindings of the results. - pub bindings: Vec>, -} - -#[cfg(test)] -mod tests { - use crate::msg::{InstantiateMsg, StoreLimitsInput}; - use cosmwasm_std::Uint128; - use schemars::_serde_json; - - #[test] - fn store_limit_default_deserialization() { - let json = r#" - {} - "#; - - let input: StoreLimitsInput = _serde_json::from_str(json).unwrap(); - assert_eq!(input.max_query_limit, 30); - assert_eq!(input.max_query_variable_count, 30); - assert_eq!(input.max_byte_size, Uint128::MAX); - assert_eq!(input.max_triple_count, Uint128::MAX); - assert_eq!(input.max_triple_byte_size, Uint128::MAX); - assert_eq!(input.max_insert_data_byte_size, Uint128::MAX); - assert_eq!(input.max_insert_data_triple_count, Uint128::MAX); - } - - #[test] - fn instantiate_default_deserialization() { - let json = r#" - {} - "#; - let msg: InstantiateMsg = _serde_json::from_str(json).unwrap(); - - assert_eq!(msg.limits.max_query_limit, 30); - assert_eq!(msg.limits.max_query_variable_count, 30); - assert_eq!(msg.limits.max_byte_size, Uint128::MAX); - assert_eq!(msg.limits.max_triple_count, Uint128::MAX); - assert_eq!(msg.limits.max_triple_byte_size, Uint128::MAX); - assert_eq!(msg.limits.max_insert_data_byte_size, Uint128::MAX); - assert_eq!(msg.limits.max_insert_data_triple_count, Uint128::MAX); - } -} diff --git a/contracts/axone-cognitarium/src/parser/ast.rs b/contracts/axone-cognitarium/src/parser/ast.rs deleted file mode 100644 index dd6f4033..00000000 --- a/contracts/axone-cognitarium/src/parser/ast.rs +++ /dev/null @@ -1,295 +0,0 @@ -use cosmwasm_schema::cw_serde; - -/// # IRI -/// Represents an IRI. 
-#[cw_serde] -pub enum IRI { - /// # Prefixed - /// An IRI prefixed with a prefix. - /// The prefixed IRI is expanded to a full IRI using the prefix definition specified in the query. - /// For example, the prefixed IRI `rdf:type` is expanded to `http://www.w3.org/1999/02/22-rdf-syntax-ns#type`. - Prefixed(String), - /// # Full - /// A full IRI. - Full(String), -} - -/// # Value -#[cw_serde] -#[serde(tag = "type")] -pub enum Value { - /// # URI - /// Represents an IRI. - #[serde(rename = "uri")] - URI { - /// The value of the IRI. - value: IRI, - }, - /// # Literal - /// Represents a literal S with optional language tag L or datatype IRI D. - Literal { - /// The value of the literal. - value: String, - /// The language tag of the literal. - #[serde(rename = "xml:lang")] - lang: Option, - /// The datatype of the literal. - datatype: Option, - }, - /// # BlankNode - /// Represents a blank node. - BlankNode { - /// The identifier of the blank node. - value: String, - }, -} - -/// # SelectQuery -/// Represents a SELECT query over the triple store, allowing to select variables to return -/// and to filter the results. -#[cw_serde] -pub struct SelectQuery { - /// The prefixes used in the query. - pub prefixes: Vec, - /// The items to select. - /// Note: the number of items to select cannot exceed the maximum query variable count defined - /// in the store limitations. - pub select: Vec, - /// The WHERE clause. - /// If `None`, there is no WHERE clause, i.e. all triples are returned without filtering. - pub r#where: WhereClause, - /// The maximum number of results to return. - /// If `None`, there is no limit. - /// Note: the value of the limit cannot exceed the maximum query limit defined in the store - /// limitations. - pub limit: Option, -} - -/// # DescribeQuery -/// Represents a DESCRIBE query over the triple store, allowing to retrieve a description of a resource -/// as a set of triples serialized in a specific format. 
-#[cw_serde] -pub struct DescribeQuery { - /// The prefixes used in the query. - pub prefixes: Vec, - /// The resource to describe given as a variable or a node. - pub resource: VarOrNamedNode, - /// The WHERE clause. - /// This clause is used to specify the resource identifier to describe using variable bindings. - pub r#where: Option, -} - -/// # ConstructQuery -/// Represents a CONSTRUCT query over the triple store, allowing to retrieve a set of triples -/// serialized in a specific format. -#[cw_serde] -pub struct ConstructQuery { - /// The prefixes used in the query. - pub prefixes: Vec, - /// The triples to construct. - /// If nothing is provided and the `where` clause is a single Bgp, the patterns are used for - /// construction. - pub construct: Vec, - /// The WHERE clause. - /// This clause is used to specify the triples to construct using variable bindings. - pub r#where: WhereClause, -} - -/// # Prefix -/// Represents a prefix, i.e. a shortcut for a namespace used in a query. -#[cw_serde] -pub struct Prefix { - /// The prefix. - pub prefix: String, - /// The namespace associated with the prefix. - pub namespace: String, -} - -/// # SelectItem -/// Represents an item to select in a [SelectQuery]. -#[cw_serde] -pub enum SelectItem { - /// # Variable - /// Represents a variable. - Variable(String), -} - -/// # WhereClause -/// Represents a WHERE clause, i.e. a set of conditions to filter the results. -#[cw_serde] -pub enum WhereClause { - /// # Bgp - /// Represents a basic graph pattern expressed as a set of triple patterns. - Bgp { patterns: Vec }, - - /// # LateralJoin - /// Evaluates right for all result row of left - LateralJoin { left: Box, right: Box }, - - /// # Filter - /// Filters the inner clause matching the expression. - /// The solutions coming from the inner clause that do not match the expression are discarded. - /// The variables provided in the inner clause are available in the filter expression. 
- Filter { expr: Expression, inner: Box }, -} - -/// # Expression -/// Represents a logical combination of operations whose evaluation results in a term. -#[cw_serde] -pub enum Expression { - /// A named node constant. - NamedNode(IRI), - /// A literal constant. - Literal(Literal), - /// A variable that must be bound for evaluation. - Variable(String), - /// Logical conjunction of expressions. - /// All expressions must evaluate to true for the conjunction to be true. - /// If the conjunction is empty, it is considered true. - And(Vec), - /// Logical disjunction of expressions. - /// At least one expression must evaluate to true for the disjunction to be true. - /// If the disjunction is empty, it is considered false. - Or(Vec), - /// Equality comparison. - Equal(Box, Box), - /// Greater than comparison. - Greater(Box, Box), - /// Greater or equal comparison. - GreaterOrEqual(Box, Box), - /// Less than comparison. - Less(Box, Box), - /// Less or equal comparison. - LessOrEqual(Box, Box), - /// Negation of an expression. - Not(Box), -} - -/// # TripleDeleteTemplate -/// Represents a triple template to be deleted. -#[cw_serde] -pub struct TripleDeleteTemplate { - /// The subject of the triple pattern. - pub subject: VarOrNamedNode, - /// The predicate of the triple pattern. - pub predicate: VarOrNamedNode, - /// The object of the triple pattern. - pub object: VarOrNamedNodeOrLiteral, -} - -/// # TripleConstructTemplate -/// Represents a triple template to be forged for a construct query. -#[cw_serde] -pub struct TripleConstructTemplate { - /// The subject of the triple pattern. - pub subject: VarOrNode, - /// The predicate of the triple pattern. - pub predicate: VarOrNamedNode, - /// The object of the triple pattern. - pub object: VarOrNodeOrLiteral, -} - -/// # TriplePattern -/// Represents a triple pattern in a [SimpleWhereCondition]. -#[cw_serde] -pub struct TriplePattern { - /// The subject of the triple pattern. 
- pub subject: VarOrNode, - /// The predicate of the triple pattern. - pub predicate: VarOrNamedNode, - /// The object of the triple pattern. - pub object: VarOrNodeOrLiteral, -} - -/// # VarOrNode -/// Represents either a variable or a node. -#[cw_serde] -pub enum VarOrNode { - /// # Variable - /// A variable. - Variable(String), - /// # Node - /// A node, i.e. an IRI or a blank node. - Node(Node), -} - -/// # VarOrNamedNode -/// Represents either a variable or a named node (IRI). -#[cw_serde] -pub enum VarOrNamedNode { - /// # Variable - /// A variable. - Variable(String), - /// # NamedNode - /// An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). - NamedNode(IRI), -} - -/// # VarOrNodeOrLiteral -/// Represents either a variable, a node or a literal. -#[cw_serde] -pub enum VarOrNodeOrLiteral { - /// # Variable - /// A variable. - Variable(String), - /// # Node - /// A node, i.e. an IRI or a blank node. - Node(Node), - /// # Literal - /// An RDF [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal), i.e. a simple literal, - /// a language-tagged string or a typed value. - Literal(Literal), -} - -/// # VarOrNamedNodeOrLiteral -/// Represents either a variable, a named node or a literal. -#[cw_serde] -pub enum VarOrNamedNodeOrLiteral { - /// # Variable - /// A variable. - Variable(String), - /// # NamedNode - /// An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). - NamedNode(IRI), - /// # Literal - /// An RDF [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal), i.e. a simple literal, - /// a language-tagged string or a typed value. - Literal(Literal), -} - -/// # Literal -/// An RDF [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal). -#[cw_serde] -pub enum Literal { - /// # Simple - /// A [simple literal](https://www.w3.org/TR/rdf11-concepts/#dfn-simple-literal) without datatype or language form. 
- Simple(String), - /// # LanguageTaggedString - /// A [language-tagged string](https://www.w3.org/TR/rdf11-concepts/#dfn-language-tagged-string) - LanguageTaggedString { - /// The [lexical form](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form). - value: String, - /// The [language tag](https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag). - language: String, - }, - /// # TypedValue - /// A value with a datatype. - TypedValue { - /// The [lexical form](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form). - value: String, - /// The [datatype IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-datatype-iri). - datatype: IRI, - }, -} - -/// # Node -/// Represents either an IRI (named node) or a blank node. -#[cw_serde] -pub enum Node { - /// # NamedNode - /// An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). - NamedNode(IRI), - /// # BlankNode - /// An RDF [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node). - BlankNode(String), -} diff --git a/contracts/axone-cognitarium/src/parser/mod.rs b/contracts/axone-cognitarium/src/parser/mod.rs deleted file mode 100644 index f47153fd..00000000 --- a/contracts/axone-cognitarium/src/parser/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod ast; - -pub use crate::parser::ast::*; diff --git a/contracts/axone-cognitarium/src/querier/engine.rs b/contracts/axone-cognitarium/src/querier/engine.rs deleted file mode 100644 index 37e17680..00000000 --- a/contracts/axone-cognitarium/src/querier/engine.rs +++ /dev/null @@ -1,1836 +0,0 @@ -use crate::parser::{ - Node, SelectItem, VarOrNamedNode, VarOrNamedNodeOrLiteral, VarOrNode, VarOrNodeOrLiteral, -}; -use crate::querier::expression::Expression; -use crate::querier::mapper::{iri_as_node, literal_as_object}; -use crate::querier::plan::{PatternValue, QueryNode, QueryPlan}; -use crate::querier::variable::{ResolvedVariable, ResolvedVariables}; -use crate::rdf::Atom; -use crate::state::{ - triples, Namespace, NamespaceResolver, NamespaceSolver, Object, Predicate, 
Subject, Triple, -}; -use crate::{rdf, state}; -use axone_rdf::normalize::IdentifierIssuer; -use cosmwasm_std::{Order, StdError, StdResult, Storage}; -use either::{Either, Left, Right}; -use std::collections::{BTreeMap, HashMap, VecDeque}; -use std::iter; -use std::rc::Rc; - -pub struct QueryEngine<'a> { - storage: &'a dyn Storage, - ns_cache: Vec, -} - -pub struct SelectResults<'a> { - pub head: Vec, - pub solutions: SolutionsIterator<'a>, -} - -impl<'a> QueryEngine<'a> { - pub fn new(storage: &'a dyn Storage, ns_cache: Vec) -> Self { - Self { storage, ns_cache } - } - - pub fn select( - &'a self, - plan: QueryPlan, - selection: Vec, - ) -> StdResult> { - let bindings = selection - .iter() - .map(|item| match item { - SelectItem::Variable(v) => v, - }) - .map(|name| -> StdResult<(String, usize)> { - match plan.get_var_index(name) { - Some(index) => Ok((name.clone(), index)), - None => Err(StdError::generic_err( - "Selected variable not found in query", - )), - } - }) - .collect::>>()?; - - Ok(SelectResults { - head: bindings.keys().cloned().collect(), - solutions: SolutionsIterator::new(self.eval_plan(plan), bindings), - }) - } - - pub fn construct_atoms( - &'a self, - plan: QueryPlan, - prefixes: &HashMap, - templates: Vec<(VarOrNode, VarOrNamedNode, VarOrNodeOrLiteral)>, - ) -> StdResult> { - let templates = templates - .into_iter() - .map(|t| AtomTemplate::try_new(&plan, prefixes, t)) - .collect::>>()?; - - Ok(ResolvedAtomIterator::new( - self.storage, - self.ns_cache.clone(), - IdentifierIssuer::new("b", 0u128), - self.eval_plan(plan), - templates, - )) - } - - pub fn construct_triples( - &'a self, - plan: QueryPlan, - templates: Vec, - ) -> ResolvedTripleIterator<'a> { - ResolvedTripleIterator::new(self.eval_plan(plan), templates) - } - - pub fn make_triple_templates( - &'a self, - plan: &QueryPlan, - prefixes: &HashMap, - templates: Either, Vec>, - ) -> StdResult> { - let mut ns_resolver = NamespaceResolver::new(self.storage, self.ns_cache.clone()); - - 
match templates { - Left(tpl) => tpl - .into_iter() - .map(|t| TripleTemplate::try_new(&mut ns_resolver, plan, prefixes, Left(t))) - .collect::>>(), - Right(tpl) => tpl - .into_iter() - .map(|t| TripleTemplate::try_new(&mut ns_resolver, plan, prefixes, Right(t))) - .collect::>>(), - } - } - - pub fn eval_plan(&'a self, plan: QueryPlan) -> ResolvedVariablesIterator<'a> { - return self.eval_node(plan.entrypoint)(ResolvedVariables::with_capacity( - plan.variables.len(), - )); - } - - fn eval_node( - &'a self, - node: QueryNode, - ) -> Rc ResolvedVariablesIterator<'a> + 'a> { - match node { - QueryNode::TriplePattern { - subject, - predicate, - object, - } => Rc::new(move |vars| { - Box::new(TriplePatternIterator::new( - self.storage, - vars, - subject.clone(), - predicate.clone(), - object.clone(), - )) - }), - QueryNode::Noop { .. } => Rc::new(|_| Box::new(iter::empty())), - QueryNode::CartesianProductJoin { left, right } => { - let left = self.eval_node(*left); - let right = self.eval_node(*right); - Rc::new(move |vars| { - let mut buffered_errors = VecDeque::new(); - let values = right(vars.clone()) - .filter_map(|res| match res { - Ok(v) => Some(v), - Err(e) => { - buffered_errors.push_back(Err(e)); - None - } - }) - .collect(); - Box::new(CartesianProductJoinIterator::new( - values, - left(vars), - buffered_errors, - )) - }) - } - QueryNode::ForLoopJoin { left, right } => { - let left = self.eval_node(*left); - let right = self.eval_node(*right); - Rc::new(move |vars| { - let right = Rc::clone(&right); - Box::new(ForLoopJoinIterator::new(left(vars), right)) - }) - } - QueryNode::Filter { expr, inner } => { - let inner = self.eval_node(*inner); - Rc::new(move |vars| { - Box::new(FilterIterator::new( - self.storage, - inner(vars), - expr.clone(), - self.ns_cache.clone(), - )) - }) - } - QueryNode::Skip { child, first } => { - let upstream = self.eval_node(*child); - Rc::new(move |vars| Box::new(upstream(vars).skip(first))) - } - QueryNode::Limit { child, first } => 
{ - let upstream = self.eval_node(*child); - Rc::new(move |vars| Box::new(upstream(vars).take(first))) - } - } - } -} - -type ResolvedVariablesIterator<'a> = Box> + 'a>; - -struct FilterIterator<'a> { - upstream: ResolvedVariablesIterator<'a>, - expr: Expression, - ns_resolver: NamespaceResolver<'a>, -} - -impl<'a> FilterIterator<'a> { - fn new( - storage: &'a dyn Storage, - upstream: ResolvedVariablesIterator<'a>, - expr: Expression, - ns_cache: Vec, - ) -> Self { - Self { - upstream, - expr, - ns_resolver: NamespaceResolver::new(storage, ns_cache), - } - } -} - -impl<'a> Iterator for FilterIterator<'a> { - type Item = StdResult; - - fn next(&mut self) -> Option { - loop { - match self.upstream.next()? { - Ok(vars) => match self.expr.evaluate(&vars, &mut self.ns_resolver) { - Ok(t) => { - if t.as_bool() { - return Some(Ok(vars)); - } - } - Err(e) => return Some(Err(e)), - }, - Err(e) => return Some(Err(e)), - } - } - } -} - -struct ForLoopJoinIterator<'a> { - left: ResolvedVariablesIterator<'a>, - right: Rc ResolvedVariablesIterator<'a> + 'a>, - current: ResolvedVariablesIterator<'a>, -} - -impl<'a> ForLoopJoinIterator<'a> { - fn new( - left: ResolvedVariablesIterator<'a>, - right: Rc ResolvedVariablesIterator<'a> + 'a>, - ) -> Self { - Self { - left, - right, - current: Box::new(iter::empty()), - } - } -} - -impl<'a> Iterator for ForLoopJoinIterator<'a> { - type Item = StdResult; - - fn next(&mut self) -> Option { - loop { - if let Some(v) = self.current.next() { - return Some(v); - } - - match self.left.next() { - None => None?, - Some(v) => { - self.current = match v { - Ok(v) => (self.right)(v), - Err(e) => Box::new(iter::once(Err(e))), - } - } - } - } - } -} - -struct CartesianProductJoinIterator<'a> { - values: Vec, - upstream_iter: ResolvedVariablesIterator<'a>, - buffer: VecDeque>, -} - -impl<'a> CartesianProductJoinIterator<'a> { - fn new( - values: Vec, - upstream_iter: ResolvedVariablesIterator<'a>, - buffer: VecDeque>, - ) -> Self { - Self { - values, 
- upstream_iter, - buffer, - } - } -} - -impl<'a> Iterator for CartesianProductJoinIterator<'a> { - type Item = StdResult; - - fn next(&mut self) -> Option { - loop { - if let Some(val) = self.buffer.pop_front() { - return Some(val); - } - - let upstream_res = match self.upstream_iter.next() { - None => None?, - Some(res) => res, - }; - - match upstream_res { - Err(err) => { - self.buffer.push_back(Err(err)); - } - Ok(val) => { - for downstream_val in &self.values { - if let Some(value) = val.merge_with(downstream_val) { - self.buffer.push_back(Ok(value)); - } - } - } - } - } - } -} - -struct TriplePatternIterator<'a> { - input: ResolvedVariables, - output_bindings: (Option, Option, Option), - triple_iter: Box> + 'a>, -} - -type TriplePatternFilters = (Option, Option, Option); -type TriplePatternBlankFilters = (bool, bool); -type TriplePatternBindings = (Option, Option, Option); - -impl<'a> TriplePatternIterator<'a> { - fn new( - storage: &'a dyn Storage, - input: ResolvedVariables, - subject: PatternValue, - predicate: PatternValue, - object: PatternValue, - ) -> Self { - if let Some((filters, blank_filters, output_bindings)) = - Self::compute_iter_io(&input, subject, predicate, object) - { - return Self { - input, - output_bindings, - triple_iter: Self::make_state_iter(storage, filters, blank_filters), - }; - } - - Self { - input, - output_bindings: (None, None, None), - triple_iter: Box::new(iter::empty()), - } - } - - fn make_state_iter( - storage: &'a dyn Storage, - filters: TriplePatternFilters, - blank_filters: (bool, bool), - ) -> Box> + 'a> { - let post_filter = move |t: &Triple| { - let s = !blank_filters.0 || matches!(t.subject, Subject::Blank(_)); - let o = !blank_filters.1 || matches!(t.object, Object::Blank(_)); - o && s - }; - - match filters { - (Some(s), Some(p), Some(o)) => { - let res = triples().load(storage, (o.as_hash().as_bytes(), p.key(), s.key())); - match res { - Err(StdError::NotFound { .. 
}) => Box::new(iter::empty()), - _ => Box::new(iter::once(res)), - } - } - (Some(s), Some(p), None) => Box::new( - triples() - .idx - .subject_and_predicate - .prefix((s.key(), p.key())) - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => post_filter(triple), - Err(_) => true, - }) - .map(|res| res.map(|(_, t)| t)), - ), - (None, Some(p), Some(o)) => Box::new( - triples() - .prefix((o.as_hash().as_bytes(), p.key())) - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => post_filter(triple), - Err(_) => true, - }) - .map(|res| res.map(|(_, t)| t)), - ), - (Some(s), None, Some(o)) => Box::new( - triples() - .idx - .subject_and_predicate - .sub_prefix(s.key()) - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => triple.object == o && post_filter(triple), - Err(_) => true, - }) - .map(|res| res.map(|(_, t)| t)), - ), - (Some(s), None, None) => Box::new( - triples() - .idx - .subject_and_predicate - .sub_prefix(s.key()) - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => post_filter(triple), - Err(_) => true, - }) - .map(|res| res.map(|(_, t)| t)), - ), - (None, Some(p), None) => Box::new( - triples() - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => triple.predicate == p && post_filter(triple), - Err(_) => true, - }) - .map(|res| res.map(|(_, t)| t)), - ), - (None, None, Some(o)) => Box::new( - triples() - .sub_prefix(o.as_hash().as_bytes()) - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => post_filter(triple), - Err(_) => true, - }) - .map(|res| res.map(|(_, t)| t)), - ), - (None, None, None) => Box::new( - triples() - .range(storage, None, None, Order::Ascending) - .filter(move |res| match res { - Ok((_, triple)) => post_filter(triple), - Err(_) => true, - }) - .map(|res| 
res.map(|(_, t)| t)), - ), - } - } - - fn compute_iter_io( - input: &ResolvedVariables, - subject: PatternValue, - predicate: PatternValue, - object: PatternValue, - ) -> Option<( - TriplePatternFilters, - TriplePatternBlankFilters, - TriplePatternBindings, - )> { - let (s_filter, sb_filter, s_bind) = - Self::resolve_pattern_part(subject, ResolvedVariable::as_subject, input)?; - let (p_filter, pb_filter, p_bind) = - Self::resolve_pattern_part(predicate, ResolvedVariable::as_predicate, input)?; - let (o_filter, ob_filter, o_bind) = - Self::resolve_pattern_part(object, ResolvedVariable::as_object, input)?; - - if pb_filter { - None?; - } - - Some(( - (s_filter, p_filter, o_filter), - (sb_filter, ob_filter), - (s_bind, p_bind, o_bind), - )) - } - - fn resolve_pattern_part( - pattern_part: PatternValue, - map_fn: M, - input: &ResolvedVariables, - ) -> Option<(Option, bool, Option)> - where - M: FnOnce(&ResolvedVariable) -> Option, - { - Some(match pattern_part { - PatternValue::Constant(s) => (Some(s), false, None), - PatternValue::BlankVariable(v) => match input.get(v) { - Some(var) => (Some(map_fn(var)?), false, None), - None => (None, true, Some(v)), - }, - PatternValue::Variable(v) => match input.get(v) { - Some(var) => (Some(map_fn(var)?), false, None), - None => (None, false, Some(v)), - }, - }) - } - - fn map_triple(&self, triple: Triple) -> Option { - let mut vars: ResolvedVariables = self.input.clone(); - - if let Some(v) = self.output_bindings.0 { - vars.merge_index(v, ResolvedVariable::Subject(triple.subject))?; - } - if let Some(v) = self.output_bindings.1 { - vars.merge_index(v, ResolvedVariable::Predicate(triple.predicate))?; - } - if let Some(v) = self.output_bindings.2 { - vars.merge_index(v, ResolvedVariable::Object(triple.object))?; - } - - Some(vars) - } -} - -impl<'a> Iterator for TriplePatternIterator<'a> { - type Item = StdResult; - - fn next(&mut self) -> Option { - let next = self.triple_iter.next()?; - - let maybe_next = match next { - 
Ok(triple) => self.map_triple(triple).map(Ok), - Err(e) => Some(Err(e)), - }; - - if maybe_next.is_none() { - return self.next(); - } - maybe_next - } -} - -pub struct SolutionsIterator<'a> { - iter: ResolvedVariablesIterator<'a>, - bindings: BTreeMap, -} - -impl<'a> SolutionsIterator<'a> { - fn new(iter: ResolvedVariablesIterator<'a>, bindings: BTreeMap) -> Self { - Self { iter, bindings } - } -} - -impl<'a> Iterator for SolutionsIterator<'a> { - type Item = StdResult>; - - fn next(&mut self) -> Option { - let resolved_variables = match self.iter.next() { - None => None?, - Some(res) => res, - }; - - resolved_variables - .and_then(|variables| { - self.bindings - .clone() - .into_iter() - .map(|(name, index)| (name, variables.get(index))) - .map(|(name, var)| match var { - None => Err(StdError::generic_err( - "Couldn't find variable in result set", - )), - Some(val) => Ok((name, val.clone())), - }) - .collect::>>() - }) - .into() - } -} - -pub struct ResolvedTripleIterator<'a> { - upstream_iter: ResolvedVariablesIterator<'a>, - templates: Vec, - buffer: VecDeque>, -} - -impl<'a> ResolvedTripleIterator<'a> { - pub fn new( - upstream_iter: ResolvedVariablesIterator<'a>, - templates: Vec, - ) -> Self { - Self { - upstream_iter, - templates, - buffer: VecDeque::new(), - } - } -} - -impl<'a> Iterator for ResolvedTripleIterator<'a> { - type Item = StdResult; - - fn next(&mut self) -> Option { - loop { - if let Some(val) = self.buffer.pop_front() { - return Some(val); - } - - let upstream_res = match self.upstream_iter.next() { - None => None?, - Some(res) => res, - }; - - match upstream_res { - Err(err) => { - self.buffer.push_back(Err(err)); - } - Ok(vars) => { - for res in self - .templates - .iter() - .map(|template| template.resolve(&vars)) - { - match res { - Ok(Some(triple)) => self.buffer.push_back(Ok(triple)), - Err(err) => self.buffer.push_back(Err(err)), - _ => {} - } - } - } - } - } - } -} - -pub struct TripleTemplate { - subject: Either, - predicate: Either, 
- object: Either, -} - -pub type TripleTemplateWithBlankNode = (VarOrNode, VarOrNamedNode, VarOrNodeOrLiteral); -pub type TripleTemplateNoBlankNode = (VarOrNamedNode, VarOrNamedNode, VarOrNamedNodeOrLiteral); - -impl TripleTemplate { - fn try_new( - ns_solver: &mut dyn NamespaceSolver, - plan: &QueryPlan, - prefixes: &HashMap, - template: Either, - ) -> StdResult { - let (s_tpl, p_tpl, o_tpl) = match template { - Right((s, p, o)) => (Right(s), p, Right(o)), - Left((s, p, o)) => (Left(s), p, Left(o)), - }; - - Ok(TripleTemplate { - subject: Self::build_subject_template(ns_solver, plan, prefixes, s_tpl)?, - predicate: Self::build_predicate_template(ns_solver, plan, prefixes, p_tpl)?, - object: Self::build_object_template(ns_solver, plan, prefixes, o_tpl)?, - }) - } - - pub fn resolve(&self, vars: &ResolvedVariables) -> StdResult> { - let subject = match Self::resolve_triple_term( - &self.subject, - ResolvedVariable::as_subject, - vars, - "subject", - )? { - Some(s) => s, - None => return Ok(None), - }; - - let predicate = match Self::resolve_triple_term( - &self.predicate, - ResolvedVariable::as_predicate, - vars, - "predicate", - )? { - Some(p) => p, - None => return Ok(None), - }; - - let object = match Self::resolve_triple_term( - &self.object, - ResolvedVariable::as_object, - vars, - "object", - )? 
{ - Some(o) => o, - None => return Ok(None), - }; - - Ok(Some(Triple { - subject, - predicate, - object, - })) - } - - fn resolve_triple_term( - term: &Either, - from_var: F, - vars: &ResolvedVariables, - term_name: &str, - ) -> StdResult> - where - T: Clone, - F: Fn(&ResolvedVariable) -> Option, - { - match term { - Left(p) => Ok(Some(p.clone())), - Right(key) => vars.get(*key).as_ref().map(from_var).ok_or_else(|| { - StdError::generic_err(format!("Unbound {:?} variable: {:?}", term_name, key)) - }), - } - } - - fn build_subject_template( - ns_solver: &mut dyn NamespaceSolver, - plan: &QueryPlan, - prefixes: &HashMap, - value: Either, - ) -> StdResult> { - Ok(match value { - Left(VarOrNode::Variable(v)) | Right(VarOrNamedNode::Variable(v)) => { - Right(plan.get_var_index(v.as_str()).ok_or(StdError::generic_err( - "Selected variable not found in query", - ))?) - } - Left(VarOrNode::Node(Node::BlankNode(n))) => Right( - plan.get_bnode_index(n.as_str()) - .ok_or(StdError::generic_err( - "Selected blank node not found in query", - ))?, - ), - Left(VarOrNode::Node(Node::NamedNode(iri))) | Right(VarOrNamedNode::NamedNode(iri)) => { - Left(Subject::Named(iri_as_node(ns_solver, prefixes, iri)?)) - } - }) - } - - fn build_predicate_template( - ns_solver: &mut dyn NamespaceSolver, - plan: &QueryPlan, - prefixes: &HashMap, - value: VarOrNamedNode, - ) -> StdResult> { - Ok(match value { - VarOrNamedNode::Variable(v) => Right(plan.get_var_index(v.as_str()).ok_or( - StdError::generic_err("Selected variable not found in query"), - )?), - VarOrNamedNode::NamedNode(iri) => Left(iri_as_node(ns_solver, prefixes, iri)?), - }) - } - - fn build_object_template( - ns_solver: &mut dyn NamespaceSolver, - plan: &QueryPlan, - prefixes: &HashMap, - value: Either, - ) -> StdResult> { - Ok(match value { - Left(VarOrNodeOrLiteral::Variable(v)) | Right(VarOrNamedNodeOrLiteral::Variable(v)) => { - Right(plan.get_var_index(v.as_str()).ok_or(StdError::generic_err( - "Selected variable not found in 
query", - ))?) - } - Left(VarOrNodeOrLiteral::Node(Node::BlankNode(n))) => Right( - plan.get_bnode_index(n.as_str()) - .ok_or(StdError::generic_err( - "Selected blank node not found in query", - ))?, - ), - Left(VarOrNodeOrLiteral::Node(Node::NamedNode(iri))) - | Right(VarOrNamedNodeOrLiteral::NamedNode(iri)) => { - Left(Object::Named(iri_as_node(ns_solver, prefixes, iri)?)) - } - Left(VarOrNodeOrLiteral::Literal(l)) | Right(VarOrNamedNodeOrLiteral::Literal(l)) => { - Left(literal_as_object(ns_solver, prefixes, l)?) - } - }) - } -} - -pub struct ResolvedAtomIterator<'a> { - ns_resolver: NamespaceResolver<'a>, - id_issuer: IdentifierIssuer, - upstream_iter: ResolvedVariablesIterator<'a>, - templates: Vec, - buffer: VecDeque>, -} - -impl<'a> ResolvedAtomIterator<'a> { - pub fn new( - storage: &'a dyn Storage, - ns_cache: Vec, - id_issuer: IdentifierIssuer, - upstream_iter: ResolvedVariablesIterator<'a>, - templates: Vec, - ) -> Self { - Self { - ns_resolver: NamespaceResolver::new(storage, ns_cache), - id_issuer, - upstream_iter, - templates, - buffer: VecDeque::new(), - } - } -} - -impl<'a> Iterator for ResolvedAtomIterator<'a> { - type Item = StdResult; - - fn next(&mut self) -> Option { - loop { - if let Some(val) = self.buffer.pop_front() { - return Some(val); - } - - let upstream_res = match self.upstream_iter.next() { - None => None?, - Some(res) => res, - }; - - match upstream_res { - Err(err) => { - self.buffer.push_back(Err(err)); - } - Ok(vars) => { - for res in self.templates.iter().map(|template| { - template.resolve(&mut self.ns_resolver, &mut self.id_issuer, &vars) - }) { - match res { - Ok(Some(atom)) => self.buffer.push_back(Ok(atom)), - Err(err) => self.buffer.push_back(Err(err)), - _ => {} - } - } - } - } - } - } -} - -pub struct AtomTemplate { - subject: Either, - property: Either, - value: Either, -} - -impl AtomTemplate { - pub fn try_new( - plan: &QueryPlan, - prefixes: &HashMap, - (s_tpl, p_tpl, o_tpl): (VarOrNode, VarOrNamedNode, 
VarOrNodeOrLiteral), - ) -> StdResult { - Ok(Self { - subject: match s_tpl { - VarOrNode::Variable(key) => Right(plan.get_var_index(key.as_str()).ok_or( - StdError::generic_err("Selected variable not found in query"), - )?), - VarOrNode::Node(n) => Left((n, prefixes).try_into()?), - }, - property: match p_tpl { - VarOrNamedNode::Variable(key) => Right(plan.get_var_index(key.as_str()).ok_or( - StdError::generic_err("Selected variable not found in query"), - )?), - VarOrNamedNode::NamedNode(iri) => Left((iri, prefixes).try_into()?), - }, - value: match o_tpl { - VarOrNodeOrLiteral::Variable(key) => Right( - plan.get_var_index(key.as_str()) - .ok_or(StdError::generic_err( - "Selected variable not found in query", - ))?, - ), - VarOrNodeOrLiteral::Node(n) => Left((n, prefixes).try_into()?), - VarOrNodeOrLiteral::Literal(l) => Left((l, prefixes).try_into()?), - }, - }) - } - - pub fn resolve( - &self, - ns_solver: &mut dyn NamespaceSolver, - id_issuer: &mut IdentifierIssuer, - vars: &ResolvedVariables, - ) -> StdResult> { - let subject = match self.resolve_atom_subject(ns_solver, id_issuer, vars)? { - Some(s) => s, - None => return Ok(None), - }; - - let property = match self.resolve_atom_property(ns_solver, vars)? { - Some(p) => p, - None => return Ok(None), - }; - - let value = match self.resolve_atom_value(ns_solver, id_issuer, vars)? 
{ - Some(v) => v, - None => return Ok(None), - }; - - Ok(Some(Atom { - subject, - property, - value, - })) - } - - fn resolve_atom_subject( - &self, - ns_solver: &mut dyn NamespaceSolver, - id_issuer: &mut IdentifierIssuer, - vars: &ResolvedVariables, - ) -> StdResult> { - Self::resolve_atom_term( - &self.subject, - ResolvedVariable::as_subject, - vars, - &mut |value| { - Ok(match value { - Subject::Named(n) => rdf::Subject::NamedNode(n.as_iri(ns_solver)?), - Subject::Blank(n) => rdf::Subject::BlankNode( - id_issuer.get_str_or_issue(n.to_string()).to_string(), - ), - }) - }, - "subject", - ) - } - - fn resolve_atom_property( - &self, - ns_solver: &mut dyn NamespaceSolver, - vars: &ResolvedVariables, - ) -> StdResult> { - Self::resolve_atom_term( - &self.property, - ResolvedVariable::as_predicate, - vars, - &mut |value| value.as_iri(ns_solver).map(rdf::Property), - "predicate", - ) - } - - fn resolve_atom_value( - &self, - ns_solver: &mut dyn NamespaceSolver, - id_issuer: &mut IdentifierIssuer, - vars: &ResolvedVariables, - ) -> StdResult> { - Self::resolve_atom_term( - &self.value, - ResolvedVariable::as_object, - vars, - &mut |value| { - Ok(match value { - Object::Named(n) => rdf::Value::NamedNode(n.as_iri(ns_solver)?), - Object::Blank(n) => { - rdf::Value::BlankNode(id_issuer.get_str_or_issue(n.to_string()).to_string()) - } - Object::Literal(l) => match l { - state::Literal::Simple { value } => rdf::Value::LiteralSimple(value), - state::Literal::I18NString { value, language } => { - rdf::Value::LiteralLang(value, language) - } - state::Literal::Typed { value, datatype } => { - rdf::Value::LiteralDatatype(value, datatype.as_iri(ns_solver)?) 
- } - }, - }) - }, - "object", - ) - } - - fn resolve_atom_term( - term: &Either, - from_var: F, - vars: &ResolvedVariables, - mapping_fn: &mut M, - term_name: &str, - ) -> StdResult> - where - A: Clone, - F: Fn(&ResolvedVariable) -> Option, - M: FnMut(T) -> StdResult, - { - match term { - Left(v) => Ok(Some(v.clone())), - Right(key) => { - let var = vars.get(*key).as_ref().ok_or_else(|| { - StdError::generic_err(format!("Unbound {:?} variable: {:?}", term_name, key)) - })?; - - match from_var(var) { - None => Ok(None), - Some(v) => mapping_fn(v).map(Some), - } - } - } - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::msg::StoreLimitsInput; - use crate::querier::expression::Term; - use crate::querier::plan::PlanVariable; - use crate::state; - use crate::state::Object::{Literal, Named}; - use crate::state::{ - Node, Store, StoreStat, BLANK_NODE_IDENTIFIER_COUNTER, NAMESPACE_KEY_INCREMENT, STORE, - }; - use crate::storer::StoreEngine; - use axone_rdf::serde::TripleReader; - use cosmwasm_std::testing::mock_dependencies; - use cosmwasm_std::{Addr, Uint128}; - use std::env; - use std::fs::File; - use std::io::{BufReader, Read}; - use std::path::Path; - - fn read_test_data(file: &str) -> Vec { - let mut bytes: Vec = Vec::new(); - - File::open( - Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()) - .join("testdata") - .join(file), - ) - .unwrap() - .read_to_end(&mut bytes) - .unwrap(); - - bytes - } - - fn fill_test_data(storage: &mut dyn Storage) { - STORE - .save( - storage, - &Store { - owner: Addr::unchecked("owner"), - limits: StoreLimitsInput::default().into(), - stat: StoreStat::default(), - }, - ) - .unwrap(); - NAMESPACE_KEY_INCREMENT.save(storage, &0u128).unwrap(); - BLANK_NODE_IDENTIFIER_COUNTER.save(storage, &0u128).unwrap(); - let data = read_test_data("sample.rdf.xml"); - let buf = BufReader::new(data.as_slice()); - let mut reader = TripleReader::new(&axone_rdf::serde::DataFormat::RDFXml, buf); - let mut storer = 
StoreEngine::new(storage).unwrap(); - let count = storer.store_all(&mut reader).unwrap(); - - assert_eq!(count, Uint128::new(40u128)); - } - - #[test] - fn select() { - let mut deps = mock_dependencies(); - fill_test_data(deps.as_mut().storage); - - struct TestCase { - plan: QueryPlan, - selection: Vec, - expects: StdResult<(Vec, Vec>)>, - } - - let cases = vec![ - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Variable(1), - object: PatternValue::Variable(2), - }, - variables: vec![ - PlanVariable::Basic("v1".to_string()), - PlanVariable::Basic("v2".to_string()), - PlanVariable::Basic("v3".to_string()), - ], - }, - selection: vec![SelectItem::Variable("v4".to_string())], - expects: Err(StdError::generic_err( - "Selected variable not found in query", - )), - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Constant(Subject::Named(state::Node { - namespace: 0, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - })), - predicate: PatternValue::Constant(state::Node { - namespace: 3, - value: "hasRegistrar".to_string(), - }), - object: PatternValue::Variable(0), - }, - variables: vec![PlanVariable::Basic("registrar".to_string())], - }, - selection: vec![SelectItem::Variable("registrar".to_string())], - expects: Ok(( - vec!["registrar".to_string()], - vec![BTreeMap::from([( - "registrar".to_string(), - ResolvedVariable::Object(Named(Node { - namespace: 4, - value: - "0x04d1f1b8f8a7a28f9a5a254c326a963a22f5a5b5d5f5e5d5c5b5a5958575655" - .to_string(), - })), - )])], - )), - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Constant(Subject::Named(state::Node { - namespace: 0, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - })), - predicate: PatternValue::Variable(0), - object: PatternValue::Variable(0), - }, - variables: 
vec![PlanVariable::Basic("v".to_string())], - }, - selection: vec![SelectItem::Variable("v".to_string())], - expects: Ok((vec!["v".to_string()], vec![])), - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::Limit { - child: Box::new(QueryNode::Skip { - child: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Variable(1), - object: PatternValue::Variable(2), - }), - first: 10, - }), - first: 3, - }, - variables: vec![ - PlanVariable::Basic("subject".to_string()), - PlanVariable::Basic("predicate".to_string()), - PlanVariable::Basic("object".to_string()), - ], - }, - selection: vec![ - SelectItem::Variable("subject".to_string()), - SelectItem::Variable("predicate".to_string()), - SelectItem::Variable("object".to_string()), - ], - expects: Ok(( - vec![ - "object".to_string(), - "predicate".to_string(), - "subject".to_string(), - ], - vec![ - BTreeMap::from([ - ( - "subject".to_string(), - ResolvedVariable::Subject(Subject::Named(Node { - namespace: 11, - value: "d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string(), - })), - ), - ( - "predicate".to_string(), - ResolvedVariable::Predicate(Node { - namespace: 3, - value: "describes".to_string(), - }), - ), - ( - "object".to_string(), - ResolvedVariable::Object(Named(Node { - namespace: 8, - value: "0ea1fc7a-dd97-4adc-a10e-169c6597bcde".to_string(), - })), - ), - ]), - BTreeMap::from([ - ( - "subject".to_string(), - ResolvedVariable::Subject(Subject::Named(Node { - namespace: 11, - value: "d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string(), - })), - ), - ( - "predicate".to_string(), - ResolvedVariable::Predicate(Node { - namespace: 3, - value: "hasDescription".to_string(), - }), - ), - ( - "object".to_string(), - ResolvedVariable::Object(Literal(state::Literal::I18NString { - value: "Un Dataset de test.".to_string(), - language: "fr".to_string(), - })), - ), - ]), - BTreeMap::from([ - ( - "subject".to_string(), - ResolvedVariable::Subject(Subject::Named(Node { - 
namespace: 11, - value: "d1615703-4ee1-4e2f-997e-15aecf1eea4e".to_string(), - })), - ), - ( - "predicate".to_string(), - ResolvedVariable::Predicate(Node { - namespace: 3, - value: "hasTitle".to_string(), - }), - ), - ( - "object".to_string(), - ResolvedVariable::Object(Literal(state::Literal::I18NString { - value: "test Dataset".to_string(), - language: "en".to_string(), - })), - ), - ]), - ], - )), - }, - ]; - - for case in cases { - let engine = QueryEngine::new(&deps.storage, vec![]); - assert_eq!( - engine.select(case.plan, case.selection).and_then(|res| Ok(( - res.head.clone(), - res.solutions - .collect::>>>()? - ))), - case.expects - ); - } - } - - #[test] - fn eval_plan() { - let mut deps = mock_dependencies(); - fill_test_data(deps.as_mut().storage); - - struct TestCase { - plan: QueryPlan, - expects: usize, - } - - let cases = vec![ - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Variable(1), - object: PatternValue::Variable(2), - }, - variables: vec![ - PlanVariable::Basic("v1".to_string()), - PlanVariable::Basic("v2".to_string()), - PlanVariable::Basic("v3".to_string()), - ], - }, - expects: 40, - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::Limit { - child: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Variable(1), - object: PatternValue::Variable(2), - }), - first: 30, - }, - variables: vec![ - PlanVariable::Basic("v1".to_string()), - PlanVariable::Basic("v2".to_string()), - PlanVariable::Basic("v3".to_string()), - ], - }, - expects: 30, - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::Limit { - child: Box::new(QueryNode::Skip { - child: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Variable(1), - object: PatternValue::Variable(2), - }), - first: 20, - }), - first: 30, - }, - variables: vec![ - 
PlanVariable::Basic("v1".to_string()), - PlanVariable::Basic("v2".to_string()), - PlanVariable::Basic("v3".to_string()), - ], - }, - expects: 20, - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::CartesianProductJoin { - left: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Constant(state::Node { - namespace: 1, - value: "type".to_string(), - }), - object: PatternValue::Constant(Object::Named(state::Node { - namespace: 2, - value: "NamedIndividual".to_string(), - })), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(1), - predicate: PatternValue::Constant(state::Node { - namespace: 3, - value: "hasPublisher".to_string(), - }), - object: PatternValue::Constant(Object::Literal( - state::Literal::Simple { - value: "AXONE".to_string(), - }, - )), - }), - }, - variables: vec![ - PlanVariable::Basic("v1".to_string()), - PlanVariable::Basic("v2".to_string()), - ], - }, - expects: 10, - }, - TestCase { - plan: QueryPlan { - entrypoint: QueryNode::ForLoopJoin { - left: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Constant(state::Node { - namespace: 1, - value: "type".to_string(), - }), - object: PatternValue::Constant(Object::Named(state::Node { - namespace: 2, - value: "NamedIndividual".to_string(), - })), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0), - predicate: PatternValue::Constant(state::Node { - namespace: 3, - value: "hasTag".to_string(), - }), - object: PatternValue::Variable(1), - }), - }, - variables: vec![ - PlanVariable::Basic("v1".to_string()), - PlanVariable::Basic("v2".to_string()), - ], - }, - expects: 3, - }, - ]; - - let engine = QueryEngine::new(&deps.storage, vec![]); - for case in cases { - assert_eq!(engine.eval_plan(case.plan).count(), case.expects); - } - } - - #[test] - fn filter_iter() { - let cases = vec![ - ( - Expression::Equal( - 
Box::new(Expression::Variable(0usize)), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(1usize), - ), - ( - Expression::Not(Box::new(Expression::Equal( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ))), - Ok(3usize), - ), - ( - Expression::Greater( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(2usize), - ), - ( - Expression::Equal( - Box::new(Expression::Variable(1usize)), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Err(StdError::generic_err("Unbound filter variable")), - ), - ( - Expression::Equal( - Box::new(Expression::Variable(3usize)), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Err(StdError::generic_err("Unbound filter variable")), - ), - ( - Expression::Equal( - Box::new(Expression::Variable(2usize)), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Err(StdError::not_found("Namespace")), - ), - ]; - - let mut upstream = Vec::with_capacity(4); - for i in 0..4 { - let mut vars = ResolvedVariables::with_capacity(3); - vars.merge_index( - 0, - ResolvedVariable::Object(Object::Literal(state::Literal::Simple { - value: format!("{i}"), - })), - ); - vars.merge_index( - 2, - ResolvedVariable::Predicate(Node { - namespace: 0, - value: "foo".to_string(), - }), - ); - upstream.push(vars); - } - - let deps = mock_dependencies(); - for (expr, expects) in cases { - let result = FilterIterator::new( - &deps.storage, - Box::new(upstream.iter().map(|v| Ok(v.clone()))), - expr, - vec![], - ) - .collect::>>(); - - assert_eq!(result.map(|s| s.len()), expects); - } - } - - #[test] - fn for_loop_join_iter() { - struct TestCase { - left: Vec, - right: Vec, - expects: Vec<(u128, u128)>, - } - - let cases = vec![ - TestCase { - left: vec![], - right: vec![0u128, 1u128], - expects: vec![], - }, - TestCase { - left: vec![2u128], - right: vec![0u128, 
1u128], - expects: vec![(2u128, 0u128), (2u128, 1u128)], - }, - TestCase { - left: vec![2u128, 3u128], - right: vec![0u128, 1u128], - expects: vec![ - (2u128, 0u128), - (2u128, 1u128), - (3u128, 0u128), - (3u128, 1u128), - ], - }, - ]; - - for case in cases { - let result = ForLoopJoinIterator::new( - Box::new(case.left.iter().map(|v| { - let mut vars = ResolvedVariables::with_capacity(3); - vars.merge_index(1, ResolvedVariable::Subject(Subject::Blank(*v))); - Ok(vars) - })), - Rc::new(|input| { - Box::new(case.right.iter().map(move |v| { - let mut vars = input.clone(); - vars.merge_index(2, ResolvedVariable::Subject(Subject::Blank(*v))); - Ok(vars) - })) - }), - ) - .collect::>>(); - assert!(result.is_ok()); - - let expects: Vec = case - .expects - .iter() - .map(|(v1, v2)| { - let mut vars = ResolvedVariables::with_capacity(3); - vars.merge_index(1, ResolvedVariable::Subject(Subject::Blank(*v1))); - vars.merge_index(2, ResolvedVariable::Subject(Subject::Blank(*v2))); - vars - }) - .collect(); - - assert_eq!(result.unwrap(), expects); - } - } - - #[test] - fn cartesian_join_iter() { - struct TestCase { - left: Vec, - right: Vec, - expects: Vec>, - } - - let cases = vec![ - TestCase { - left: vec![], - right: vec![0u128, 1u128], - expects: vec![], - }, - TestCase { - left: vec![0u128, 1u128], - right: vec![], - expects: vec![], - }, - TestCase { - left: vec![2u128], - right: vec![0u128, 1u128], - expects: vec![vec![0u128, 2u128], vec![1u128, 2u128]], - }, - TestCase { - left: vec![2u128, 3u128], - right: vec![0u128, 1u128], - expects: vec![ - vec![0u128, 2u128], - vec![1u128, 2u128], - vec![0u128, 3u128], - vec![1u128, 3u128], - ], - }, - ]; - - for case in cases { - let result = CartesianProductJoinIterator::new( - case.right - .iter() - .map(|v| { - let mut vars = ResolvedVariables::with_capacity(2); - vars.merge_index(0, ResolvedVariable::Subject(Subject::Blank(*v))); - vars - }) - .collect(), - Box::new(case.left.iter().map(|v| { - let mut vars = 
ResolvedVariables::with_capacity(2); - vars.merge_index(1, ResolvedVariable::Subject(Subject::Blank(*v))); - Ok(vars) - })), - VecDeque::new(), - ) - .collect::>>(); - assert!(result.is_ok()); - - let expects: Vec = case - .expects - .iter() - .map(|v| { - let mut vars = ResolvedVariables::with_capacity(2); - if let Some(val) = v.get(0) { - vars.merge_index(0, ResolvedVariable::Subject(Subject::Blank(*val))); - } - if let Some(val) = v.get(1) { - vars.merge_index(1, ResolvedVariable::Subject(Subject::Blank(*val))); - } - vars - }) - .collect(); - - assert_eq!(result.unwrap(), expects); - } - } - - #[test] - fn triple_pattern_iter_compute_io() { - let t_subject = Subject::Blank(0u128); - let t_predicate = state::Node { - namespace: 0u128, - value: "whatever".to_string(), - }; - let t_object = Object::Blank(1u128); - - let mut variables = ResolvedVariables::with_capacity(6); - variables.merge_index(1, ResolvedVariable::Subject(t_subject.clone())); - variables.merge_index(2, ResolvedVariable::Predicate(t_predicate.clone())); - variables.merge_index(3, ResolvedVariable::Object(t_object.clone())); - - struct TestCase { - subject: PatternValue, - predicate: PatternValue, - object: PatternValue, - expects: Option<( - TriplePatternFilters, - TriplePatternBlankFilters, - TriplePatternBindings, - )>, - } - let cases = vec![ - TestCase { - subject: PatternValue::Variable(0), - predicate: PatternValue::Variable(4), - object: PatternValue::Variable(5), - expects: Some(( - (None, None, None), - (false, false), - (Some(0), Some(4), Some(5)), - )), - }, - TestCase { - subject: PatternValue::BlankVariable(0), - predicate: PatternValue::Variable(4), - object: PatternValue::BlankVariable(5), - expects: Some(( - (None, None, None), - (true, true), - (Some(0), Some(4), Some(5)), - )), - }, - TestCase { - subject: PatternValue::BlankVariable(0), - predicate: PatternValue::BlankVariable(4), - object: PatternValue::BlankVariable(5), - expects: None, - }, - TestCase { - subject: 
PatternValue::Variable(1), - predicate: PatternValue::Variable(4), - object: PatternValue::Variable(5), - expects: Some(( - (Some(t_subject.clone()), None, None), - (false, false), - (None, Some(4), Some(5)), - )), - }, - TestCase { - subject: PatternValue::Variable(1), - predicate: PatternValue::Variable(2), - object: PatternValue::Variable(5), - expects: Some(( - (Some(t_subject.clone()), Some(t_predicate.clone()), None), - (false, false), - (None, None, Some(5)), - )), - }, - TestCase { - subject: PatternValue::Variable(1), - predicate: PatternValue::Variable(2), - object: PatternValue::Variable(3), - expects: Some(( - (Some(t_subject), Some(t_predicate), Some(t_object)), - (false, false), - (None, None, None), - )), - }, - TestCase { - subject: PatternValue::Variable(3), - predicate: PatternValue::Variable(4), - object: PatternValue::Variable(5), - expects: Some(( - (Some(Subject::Blank(1u128)), None, None), - (false, false), - (None, Some(4), Some(5)), - )), - }, - TestCase { - subject: PatternValue::Variable(3), - predicate: PatternValue::Variable(1), - object: PatternValue::Variable(5), - expects: None, - }, - ]; - - for case in cases { - assert_eq!( - TriplePatternIterator::compute_iter_io( - &variables, - case.subject, - case.predicate, - case.object - ), - case.expects - ); - } - } - - #[test] - fn triple_pattern_iter_make_state_iter() { - let mut deps = mock_dependencies(); - fill_test_data(deps.as_mut().storage); - - struct TestCase { - filters: TriplePatternFilters, - expects: usize, - } - let cases = vec![ - TestCase { - filters: (None, None, None), - expects: 40, - }, - TestCase { - filters: ( - Some(Subject::Named(state::Node { - namespace: 0u128, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - })), - None, - None, - ), - expects: 3, - }, - TestCase { - filters: ( - None, - Some(state::Node { - namespace: 1u128, - value: "type".to_string(), - }), - None, - ), - expects: 10, - }, - TestCase { - filters: ( - None, - None, - 
Some(Object::Named(state::Node { - namespace: 0u128, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - })), - ), - expects: 2, - }, - TestCase { - filters: ( - Some(Subject::Named(state::Node { - namespace: 0u128, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - })), - Some(state::Node { - namespace: 1u128, - value: "type".to_string(), - }), - None, - ), - expects: 2, - }, - TestCase { - filters: ( - None, - Some(state::Node { - namespace: 1u128, - value: "type".to_string(), - }), - Some(Object::Named(state::Node { - namespace: 2u128, - value: "NamedIndividual".to_string(), - })), - ), - expects: 5, - }, - TestCase { - filters: ( - Some(Subject::Named(state::Node { - namespace: 0u128, - value: "97ff7e16-c08d-47be-8475-211016c82e33".to_string(), - })), - Some(state::Node { - namespace: 1u128, - value: "type".to_string(), - }), - Some(Object::Named(state::Node { - namespace: 2u128, - value: "NamedIndividual".to_string(), - })), - ), - expects: 1, - }, - TestCase { - filters: ( - Some(Subject::Named(state::Node { - namespace: 0u128, - value: "not-existing".to_string(), - })), - Some(state::Node { - namespace: 1u128, - value: "type".to_string(), - }), - Some(Object::Named(state::Node { - namespace: 2u128, - value: "NamedIndividual".to_string(), - })), - ), - expects: 0, - }, - ]; - - for case in cases { - assert_eq!( - TriplePatternIterator::make_state_iter(&deps.storage, case.filters, (false, false)) - .count(), - case.expects - ); - } - } -} diff --git a/contracts/axone-cognitarium/src/querier/expression.rs b/contracts/axone-cognitarium/src/querier/expression.rs deleted file mode 100644 index 816379e0..00000000 --- a/contracts/axone-cognitarium/src/querier/expression.rs +++ /dev/null @@ -1,514 +0,0 @@ -use crate::parser; -use crate::querier::mapper::iri_as_string; -use crate::querier::variable::HasBoundVariables; -use crate::querier::ResolvedVariables; -use crate::state::NamespaceSolver; -use cosmwasm_std::{StdError, StdResult}; -use 
std::cmp::Ordering; -use std::collections::HashMap; - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum Expression { - Constant(Term), - Variable(usize), - And(Vec), - Or(Vec), - Equal(Box, Box), - Greater(Box, Box), - GreaterOrEqual(Box, Box), - Less(Box, Box), - LessOrEqual(Box, Box), - Not(Box), -} - -impl Expression { - pub fn evaluate( - &self, - vars: &ResolvedVariables, - ns_solver: &mut dyn NamespaceSolver, - ) -> StdResult { - match self { - Expression::Constant(term) => Ok(term.clone()), - Expression::Variable(v) => vars - .get(*v) - .clone() - .ok_or(StdError::generic_err("Unbound filter variable")) - .and_then(|v| v.as_term(ns_solver)), - Expression::And(exprs) => { - for expr in exprs { - if !expr.evaluate(vars, ns_solver)?.as_bool() { - return Ok(Term::Boolean(false)); - } - } - Ok(Term::Boolean(true)) - } - Expression::Or(exprs) => { - for expr in exprs { - if expr.evaluate(vars, ns_solver)?.as_bool() { - return Ok(Term::Boolean(true)); - } - } - Ok(Term::Boolean(false)) - } - Expression::Equal(left, right) => Ok(Term::Boolean( - left.evaluate(vars, ns_solver)? == right.evaluate(vars, ns_solver)?, - )), - Expression::Greater(left, right) => Ok(Term::Boolean( - left.evaluate(vars, ns_solver)? > right.evaluate(vars, ns_solver)?, - )), - Expression::GreaterOrEqual(left, right) => Ok(Term::Boolean( - left.evaluate(vars, ns_solver)? >= right.evaluate(vars, ns_solver)?, - )), - Expression::Less(left, right) => Ok(Term::Boolean( - left.evaluate(vars, ns_solver)? < right.evaluate(vars, ns_solver)?, - )), - Expression::LessOrEqual(left, right) => Ok(Term::Boolean( - left.evaluate(vars, ns_solver)? 
<= right.evaluate(vars, ns_solver)?, - )), - Expression::Not(expr) => Ok(Term::Boolean(!expr.evaluate(vars, ns_solver)?.as_bool())), - } - } -} - -impl HasBoundVariables for Expression { - fn lookup_bound_variables(&self, callback: &mut impl FnMut(usize)) { - match self { - Expression::Constant(_) => {} - Expression::Variable(v) => { - callback(*v); - } - Expression::And(exprs) | Expression::Or(exprs) => { - exprs - .iter() - .for_each(|e| e.lookup_bound_variables(callback)); - } - Expression::Equal(left, right) - | Expression::Greater(left, right) - | Expression::GreaterOrEqual(left, right) - | Expression::Less(left, right) - | Expression::LessOrEqual(left, right) => { - left.lookup_bound_variables(callback); - right.lookup_bound_variables(callback); - } - Expression::Not(expr) => { - expr.lookup_bound_variables(callback); - } - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum Term { - String(String), - Boolean(bool), -} - -impl Term { - pub fn from_iri(iri: parser::IRI, prefixes: &HashMap) -> StdResult { - Ok(Term::String(iri_as_string(iri, prefixes)?)) - } - - pub fn from_literal( - literal: parser::Literal, - prefixes: &HashMap, - ) -> StdResult { - Ok(Term::String(match literal { - parser::Literal::Simple(value) => value, - parser::Literal::LanguageTaggedString { value, language } => { - format!("{}{}", value, language) - } - parser::Literal::TypedValue { value, datatype } => { - format!("{}{}", value, iri_as_string(datatype, prefixes)?) 
- } - })) - } - - pub fn as_string(&self) -> String { - match self { - Term::String(t) => t.clone(), - Term::Boolean(b) => b.to_string(), - } - } - - pub fn as_bool(&self) -> bool { - match self { - Term::String(s) => !s.is_empty(), - Term::Boolean(b) => *b, - } - } -} - -impl PartialOrd for Term { - fn partial_cmp(&self, other: &Term) -> Option { - if self == other { - return Some(Ordering::Equal); - } - - match (self, other) { - (Term::String(left), Term::String(right)) => Some(left.cmp(right)), - (Term::Boolean(left), Term::Boolean(right)) => Some(left.cmp(right)), - _ => None, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::querier::variable::ResolvedVariable; - use crate::state::{InMemoryNamespaceSolver, Node, Object}; - use std::collections::BTreeSet; - - #[test] - fn expression_bound_variables() { - let cases = vec![ - ( - Expression::Constant(Term::String("foo".to_string())), - vec![], - ), - (Expression::Variable(0), vec![0]), - ( - Expression::And(vec![Expression::Variable(0), Expression::Variable(1)]), - vec![0, 1], - ), - ( - Expression::Or(vec![Expression::Variable(0), Expression::Variable(1)]), - vec![0, 1], - ), - ( - Expression::Equal( - Box::new(Expression::Variable(0)), - Box::new(Expression::Variable(1)), - ), - vec![0, 1], - ), - ( - Expression::Greater( - Box::new(Expression::Variable(0)), - Box::new(Expression::Variable(1)), - ), - vec![0, 1], - ), - ( - Expression::GreaterOrEqual( - Box::new(Expression::Variable(0)), - Box::new(Expression::Variable(1)), - ), - vec![0, 1], - ), - ( - Expression::Less( - Box::new(Expression::Variable(0)), - Box::new(Expression::Variable(1)), - ), - vec![0, 1], - ), - ( - Expression::LessOrEqual( - Box::new(Expression::Variable(0)), - Box::new(Expression::Variable(1)), - ), - vec![0, 1], - ), - (Expression::Not(Box::new(Expression::Variable(0))), vec![0]), - ]; - - for case in cases { - assert_eq!(case.0.bound_variables(), BTreeSet::from_iter(case.1)); - } - } - - #[test] - fn 
expression_evaluate() { - let cases = vec![ - ( - Expression::Constant(Term::Boolean(true)), - Ok(Term::Boolean(true)), - ), - ( - Expression::Variable(0), - Ok(Term::String("http:://example.com/foo".to_string())), - ), - ( - Expression::Variable(1), - Err(StdError::not_found("Namespace")), - ), - ( - Expression::Variable(12), - Err(StdError::generic_err("Unbound filter variable")), - ), - ( - Expression::And(vec![ - Expression::Constant(Term::Boolean(true)), - Expression::Constant(Term::Boolean(true)), - ]), - Ok(Term::Boolean(true)), - ), - (Expression::And(vec![]), Ok(Term::Boolean(true))), - ( - Expression::And(vec![ - Expression::Constant(Term::Boolean(true)), - Expression::Constant(Term::Boolean(false)), - ]), - Ok(Term::Boolean(false)), - ), - ( - Expression::Or(vec![ - Expression::Constant(Term::Boolean(true)), - Expression::Constant(Term::Boolean(false)), - ]), - Ok(Term::Boolean(true)), - ), - (Expression::Or(vec![]), Ok(Term::Boolean(false))), - ( - Expression::Or(vec![ - Expression::Constant(Term::Boolean(false)), - Expression::Constant(Term::Boolean(false)), - ]), - Ok(Term::Boolean(false)), - ), - ( - Expression::Equal( - Box::new(Expression::Constant(Term::String("foo".to_string()))), - Box::new(Expression::Constant(Term::String("foo".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::Equal( - Box::new(Expression::Constant(Term::String("foo".to_string()))), - Box::new(Expression::Constant(Term::String("bar".to_string()))), - ), - Ok(Term::Boolean(false)), - ), - ( - Expression::Greater( - Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(false)), - ), - ( - Expression::Greater( - Box::new(Expression::Constant(Term::String("2".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::GreaterOrEqual( - 
Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("2".to_string()))), - ), - Ok(Term::Boolean(false)), - ), - ( - Expression::GreaterOrEqual( - Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::GreaterOrEqual( - Box::new(Expression::Constant(Term::String("2".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::Less( - Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("2".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::Less( - Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(false)), - ), - ( - Expression::LessOrEqual( - Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("2".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::LessOrEqual( - Box::new(Expression::Constant(Term::String("1".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(true)), - ), - ( - Expression::LessOrEqual( - Box::new(Expression::Constant(Term::String("2".to_string()))), - Box::new(Expression::Constant(Term::String("1".to_string()))), - ), - Ok(Term::Boolean(false)), - ), - ( - Expression::Not(Box::new(Expression::Constant(Term::Boolean(true)))), - Ok(Term::Boolean(false)), - ), - ( - Expression::Not(Box::new(Expression::Constant(Term::Boolean(false)))), - Ok(Term::Boolean(true)), - ), - ]; - - let mut vars = ResolvedVariables::with_capacity(2); - vars.merge_index( - 0, - ResolvedVariable::Object(Object::Named(Node { - namespace: 0, - value: "foo".to_string(), - })), - ); - vars.merge_index( - 1, - 
ResolvedVariable::Object(Object::Named(Node { - namespace: 12, - value: "foo".to_string(), - })), - ); - - let mut ns_solver = InMemoryNamespaceSolver::with(vec![(0, "http:://example.com/")]); - for case in cases { - assert_eq!(case.0.evaluate(&vars, &mut ns_solver), case.1); - } - } - - #[test] - fn term_from_iri() { - let cases = vec![ - ( - parser::IRI::Prefixed("foo:bar".to_string()), - Ok(Term::String("http://example.com/bar".to_string())), - ), - ( - parser::IRI::Full("foo:bar".to_string()), - Ok(Term::String("foo:bar".to_string())), - ), - ( - parser::IRI::Prefixed("unknown:bar".to_string()), - Err(StdError::generic_err("Prefix not found: unknown")), - ), - ]; - - let mut prefixes = HashMap::new(); - prefixes.insert("foo".to_string(), "http://example.com/".to_string()); - - for case in cases { - assert_eq!(Term::from_iri(case.0, &prefixes), case.1); - } - } - - #[test] - fn term_from_literal() { - let cases = vec![ - ( - parser::Literal::Simple("foo".to_string()), - Ok(Term::String("foo".to_string())), - ), - ( - parser::Literal::LanguageTaggedString { - value: "foo".to_string(), - language: "en".to_string(), - }, - Ok(Term::String("fooen".to_string())), - ), - ( - parser::Literal::TypedValue { - value: "foo".to_string(), - datatype: parser::IRI::Prefixed("foo:bar".to_string()), - }, - Ok(Term::String("foohttp://example.com/bar".to_string())), - ), - ( - parser::Literal::TypedValue { - value: "foo".to_string(), - datatype: parser::IRI::Prefixed("unknown:bar".to_string()), - }, - Err(StdError::generic_err("Prefix not found: unknown")), - ), - ]; - - let mut prefixes = HashMap::new(); - prefixes.insert("foo".to_string(), "http://example.com/".to_string()); - - for case in cases { - assert_eq!(Term::from_literal(case.0, &prefixes), case.1); - } - } - - #[test] - fn term_as_string() { - let cases = vec![ - (Term::String("foo".to_string()), "foo"), - (Term::Boolean(true), "true"), - (Term::Boolean(false), "false"), - ]; - for case in cases { - 
assert_eq!(case.0.as_string(), case.1); - } - } - - #[test] - fn term_as_bool() { - let cases = vec![ - (Term::String("foo".to_string()), true), - (Term::String("".to_string()), false), - (Term::Boolean(true), true), - (Term::Boolean(false), false), - ]; - for case in cases { - assert_eq!(case.0.as_bool(), case.1); - } - } - - #[test] - fn term_partial_cmp() { - let cases = vec![ - ( - Term::String("a".to_string()), - Term::String("b".to_string()), - Some(Ordering::Less), - ), - ( - Term::String("b".to_string()), - Term::String("a".to_string()), - Some(Ordering::Greater), - ), - ( - Term::String("a".to_string()), - Term::String("a".to_string()), - Some(Ordering::Equal), - ), - ( - Term::Boolean(true), - Term::Boolean(false), - Some(Ordering::Greater), - ), - ( - Term::Boolean(false), - Term::Boolean(true), - Some(Ordering::Less), - ), - ( - Term::Boolean(true), - Term::Boolean(true), - Some(Ordering::Equal), - ), - (Term::String("a".to_string()), Term::Boolean(true), None), - (Term::Boolean(true), Term::String("a".to_string()), None), - ]; - for case in cases { - assert_eq!(case.0.partial_cmp(&case.1), case.2); - } - } -} diff --git a/contracts/axone-cognitarium/src/querier/mapper.rs b/contracts/axone-cognitarium/src/querier/mapper.rs deleted file mode 100644 index a9b241dc..00000000 --- a/contracts/axone-cognitarium/src/querier/mapper.rs +++ /dev/null @@ -1,48 +0,0 @@ -use crate::parser::{Literal, IRI}; -use crate::state; -use crate::state::{NamespaceSolver, Object}; -use axone_rdf::uri::{expand_uri, explode_iri}; -use cosmwasm_std::StdResult; -use std::collections::HashMap; - -pub fn literal_as_object( - ns_solver: &mut dyn NamespaceSolver, - prefixes: &HashMap, - literal: Literal, -) -> StdResult { - Ok(Object::Literal(match literal { - Literal::Simple(value) => state::Literal::Simple { value }, - Literal::LanguageTaggedString { value, language } => { - state::Literal::I18NString { value, language } - } - Literal::TypedValue { value, datatype } => 
state::Literal::Typed { - value, - datatype: iri_as_node(ns_solver, prefixes, datatype)?, - }, - })) -} - -pub fn iri_as_node( - ns_solver: &mut dyn NamespaceSolver, - prefixes: &HashMap, - iri: IRI, -) -> StdResult { - match iri { - IRI::Prefixed(prefixed) => expand_uri(&prefixed, prefixes), - IRI::Full(full) => Ok(full), - } - .and_then(|iri| explode_iri(&iri)) - .and_then(|(ns_key, v)| { - ns_solver.resolve_from_val(ns_key).map(|ns| state::Node { - namespace: ns.key, - value: v, - }) - }) -} - -pub fn iri_as_string(iri: IRI, prefixes: &HashMap) -> StdResult { - match iri { - IRI::Prefixed(prefixed) => expand_uri(&prefixed, prefixes), - IRI::Full(full) => Ok(full), - } -} diff --git a/contracts/axone-cognitarium/src/querier/mod.rs b/contracts/axone-cognitarium/src/querier/mod.rs deleted file mode 100644 index 84e4d6e7..00000000 --- a/contracts/axone-cognitarium/src/querier/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -mod engine; -mod expression; -mod mapper; -mod plan; -mod plan_builder; -mod variable; - -pub use engine::*; -pub use plan::*; -pub use plan_builder::*; -pub use variable::ResolvedVariables; diff --git a/contracts/axone-cognitarium/src/querier/plan.rs b/contracts/axone-cognitarium/src/querier/plan.rs deleted file mode 100644 index fd573eb0..00000000 --- a/contracts/axone-cognitarium/src/querier/plan.rs +++ /dev/null @@ -1,218 +0,0 @@ -use crate::querier::expression::Expression; -use crate::querier::variable::HasBoundVariables; -use crate::state::{Object, Predicate, Subject}; - -/// Represents a querying plan. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct QueryPlan { - /// References the ending node of the plan, when evaluated others nodes will be invoked in - /// cascade. - pub entrypoint: QueryNode, - - /// Contains all the query variables, their index in this array are internally used as - /// identifiers. 
- pub variables: Vec, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum PlanVariable { - Basic(String), - BlankNode(String), -} - -impl QueryPlan { - pub fn empty_plan() -> Self { - Self { - entrypoint: QueryNode::noop(), - variables: Vec::new(), - } - } - - /// Resolve the index corresponding to the variable name, if not attached to a blank node. - pub fn get_var_index(&self, var_name: &str) -> Option { - self.variables.iter().enumerate().find_map(|(index, it)| { - matches!(it, PlanVariable::Basic(name) if name == var_name).then_some(index) - }) - } - - /// Resolve the index corresponding to blank node name. - pub fn get_bnode_index(&self, bnode_name: &str) -> Option { - self.variables.iter().enumerate().find_map(|(index, it)| { - matches!(it, PlanVariable::BlankNode(name) if name == bnode_name).then_some(index) - }) - } -} - -/// Represents a single part of the query plan processing. Each node is intended to provide a -/// specific behavior given an evaluation context. -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum QueryNode { - /// Match the triple pattern against the state. The triple elements can be either a variable or - /// a constant value, in the case of a variable it'll be either provided by the context of - /// previous evaluation or calculated and present in output. - TriplePattern { - subject: PatternValue, - predicate: PatternValue, - object: PatternValue, - }, - - /// Results in no solutions, this special node is used when we know before plan execution that a node - /// will end up with no possible solutions. For example, using a triple pattern filtering with a constant - /// named node containing a non-existing namespace. - Noop { bound_variables: Vec }, - - /// Join two nodes by applying the cartesian product of the nodes variables. - /// - /// This should be used when the nodes don't have variables in common, and can be seen as a - /// full join of disjoint datasets. 
- CartesianProductJoin { left: Box, right: Box }, - - /// Join two nodes by using the variables values from the left node as replacement in the right - /// node. - /// - /// This results to an inner join, but the underlying processing stream the variables from the - /// left node to use them as right node values. - ForLoopJoin { left: Box, right: Box }, - - /// Filter the results of the inner node by applying the expression. - Filter { expr: Expression, inner: Box }, - - /// Skip the specified first elements from the child node. - Skip { child: Box, first: usize }, - - /// Limit to the specified first elements from the child node. - Limit { child: Box, first: usize }, -} - -impl QueryNode { - pub fn noop() -> Self { - QueryNode::Noop { - bound_variables: Vec::new(), - } - } -} - -impl HasBoundVariables for QueryNode { - fn lookup_bound_variables(&self, callback: &mut impl FnMut(usize)) { - match self { - QueryNode::TriplePattern { - subject, - predicate, - object, - } => { - subject.lookup_bound_variable(callback); - predicate.lookup_bound_variable(callback); - object.lookup_bound_variable(callback); - } - QueryNode::Noop { bound_variables } => { - bound_variables.iter().for_each(|v| callback(*v)); - } - QueryNode::CartesianProductJoin { left, right } - | QueryNode::ForLoopJoin { left, right } => { - left.lookup_bound_variables(callback); - right.lookup_bound_variables(callback); - } - QueryNode::Filter { expr, inner } => { - expr.lookup_bound_variables(callback); - inner.lookup_bound_variables(callback); - } - QueryNode::Skip { child, .. } | QueryNode::Limit { child, .. } => { - child.lookup_bound_variables(callback); - } - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum PatternValue { - Constant(V), - Variable(usize), - /// Special variable that is expected to resolve as a blank node. 
- BlankVariable(usize), -} - -impl PatternValue { - pub fn lookup_bound_variable(&self, callback: &mut impl FnMut(usize)) { - if let PatternValue::Variable(v) | PatternValue::BlankVariable(v) = self { - callback(*v); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::collections::BTreeSet; - - #[test] - fn bound_variables() { - let cases = vec![ - ( - QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(2usize), - }, - BTreeSet::from([0usize, 1usize, 2usize]), - ), - ( - QueryNode::Noop { - bound_variables: vec![0usize, 1usize], - }, - BTreeSet::from([0usize, 1usize]), - ), - ( - QueryNode::Limit { - first: 20usize, - child: Box::new(QueryNode::Skip { - first: 20usize, - child: Box::new(QueryNode::ForLoopJoin { - left: Box::new(QueryNode::CartesianProductJoin { - left: Box::new(QueryNode::TriplePattern { - subject: PatternValue::BlankVariable(4usize), - predicate: PatternValue::Variable(5usize), - object: PatternValue::Variable(0usize), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(3usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::BlankVariable(4usize), - }), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(2usize), - }), - }), - }), - }, - BTreeSet::from([0usize, 1usize, 2usize, 3usize, 4usize, 5usize]), - ), - ]; - - for case in cases { - assert_eq!(case.0.bound_variables(), case.1) - } - } - - #[test] - fn get_var_index() { - let plan = QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::BlankVariable(2usize), - }, - variables: vec![ - PlanVariable::Basic("1".to_string()), - PlanVariable::Basic("2".to_string()), - PlanVariable::BlankNode("3".to_string()), 
- ], - }; - - assert_eq!(plan.get_var_index("1"), Some(0usize)); - assert_eq!(plan.get_var_index("2"), Some(1usize)); - assert_eq!(plan.get_var_index("3"), None); - } -} diff --git a/contracts/axone-cognitarium/src/querier/plan_builder.rs b/contracts/axone-cognitarium/src/querier/plan_builder.rs deleted file mode 100644 index 8a7a2591..00000000 --- a/contracts/axone-cognitarium/src/querier/plan_builder.rs +++ /dev/null @@ -1,977 +0,0 @@ -use crate::parser; -use crate::parser::{ - Node, TriplePattern, VarOrNamedNode, VarOrNode, VarOrNodeOrLiteral, WhereClause, -}; -use crate::querier::expression::{Expression, Term}; -use crate::querier::mapper::{iri_as_node, literal_as_object}; -use crate::querier::plan::{PatternValue, PlanVariable, QueryNode, QueryPlan}; -use crate::querier::variable::HasBoundVariables; -use crate::state::{ - HasCachedNamespaces, Namespace, NamespaceQuerier, NamespaceResolver, Object, Predicate, Subject, -}; -use cosmwasm_std::{StdError, StdResult, Storage}; -use std::collections::HashMap; - -pub struct PlanBuilder<'a> { - ns_resolver: NamespaceResolver<'a>, - prefixes: &'a HashMap, - variables: Vec, - limit: Option, - skip: Option, -} - -impl<'a> PlanBuilder<'a> { - pub fn new( - storage: &'a dyn Storage, - prefixes: &'a HashMap, - ns_cache: Option>, - ) -> Self { - Self { - ns_resolver: NamespaceResolver::new(storage, ns_cache.unwrap_or_default()), - prefixes, - variables: Vec::new(), - skip: None, - limit: None, - } - } - - pub fn with_limit(mut self, limit: usize) -> Self { - self.limit = Some(limit); - self - } - - #[allow(dead_code)] - pub fn with_skip(mut self, skip: usize) -> Self { - self.skip = Some(skip); - self - } - - pub fn build_plan(&mut self, where_clause: &WhereClause) -> StdResult { - let mut node = self.build_node(where_clause)?; - - if let Some(skip) = self.skip { - node = QueryNode::Skip { - child: Box::new(node), - first: skip, - } - } - if let Some(limit) = self.limit { - node = QueryNode::Limit { - child: Box::new(node), - 
first: limit, - } - } - Ok(QueryPlan { - entrypoint: node, - variables: self.variables.clone(), - }) - } - - fn build_node(&mut self, where_clause: &WhereClause) -> StdResult { - match where_clause { - WhereClause::Bgp { patterns } => self.build_from_bgp(patterns.iter()), - WhereClause::LateralJoin { left, right } => Ok(QueryNode::ForLoopJoin { - left: Box::new(self.build_node(left)?), - right: Box::new(self.build_node(right)?), - }), - WhereClause::Filter { expr, inner } => { - let inner = Box::new(self.build_node(inner)?); - let expr = self.build_expression(expr)?; - - if !expr.bound_variables().is_subset(&inner.bound_variables()) { - return Err(StdError::generic_err( - "Unbound variable in filter expression", - )); - } - - Ok(QueryNode::Filter { expr, inner }) - } - } - } - - fn build_from_bgp<'b>( - &mut self, - bgp: impl Iterator, - ) -> StdResult { - bgp.map(|pattern| self.build_triple_pattern(pattern)) - .reduce(|acc, item| { - let acc = acc?; - let item = item?; - - if acc - .bound_variables() - .intersection(&item.bound_variables()) - .next() - .is_some() - { - Ok(QueryNode::ForLoopJoin { - left: Box::new(acc), - right: Box::new(item), - }) - } else { - Ok(QueryNode::CartesianProductJoin { - left: Box::new(acc), - right: Box::new(item), - }) - } - }) - .unwrap_or(Ok(QueryNode::noop())) - } - - fn build_expression(&mut self, expr: &parser::Expression) -> StdResult { - match expr { - parser::Expression::NamedNode(iri) => { - Term::from_iri(iri.clone(), self.prefixes).map(Expression::Constant) - } - parser::Expression::Literal(literal) => { - Term::from_literal(literal.clone(), self.prefixes).map(Expression::Constant) - } - parser::Expression::Variable(v) => Ok(Expression::Variable( - self.resolve_basic_variable(v.to_string()), - )), - parser::Expression::And(exprs) => exprs - .iter() - .map(|e| self.build_expression(e)) - .collect::>>() - .map(Expression::And), - parser::Expression::Or(exprs) => exprs - .iter() - .map(|e| self.build_expression(e)) - 
.collect::>>() - .map(Expression::Or), - parser::Expression::Equal(left, right) => Ok(Expression::Equal( - Box::new(self.build_expression(left)?), - Box::new(self.build_expression(right)?), - )), - parser::Expression::Greater(left, right) => Ok(Expression::Greater( - Box::new(self.build_expression(left)?), - Box::new(self.build_expression(right)?), - )), - parser::Expression::GreaterOrEqual(left, right) => Ok(Expression::GreaterOrEqual( - Box::new(self.build_expression(left)?), - Box::new(self.build_expression(right)?), - )), - parser::Expression::Less(left, right) => Ok(Expression::Less( - Box::new(self.build_expression(left)?), - Box::new(self.build_expression(right)?), - )), - parser::Expression::LessOrEqual(left, right) => Ok(Expression::LessOrEqual( - Box::new(self.build_expression(left)?), - Box::new(self.build_expression(right)?), - )), - parser::Expression::Not(child) => self - .build_expression(child) - .map(Box::new) - .map(Expression::Not), - } - } - - fn build_triple_pattern(&mut self, pattern: &TriplePattern) -> StdResult { - let subject_res = self.build_subject_pattern(pattern.subject.clone()); - let predicate_res = self.build_predicate_pattern(pattern.predicate.clone()); - let object_res = self.build_object_pattern(pattern.object.clone()); - - let mut bound_variables: Vec = vec![]; - let maybe_subject = - Self::recover_ns_not_found_pattern_res(subject_res, &mut bound_variables)?; - let maybe_predicate = - Self::recover_ns_not_found_pattern_res(predicate_res, &mut bound_variables)?; - let maybe_object = - Self::recover_ns_not_found_pattern_res(object_res, &mut bound_variables)?; - - Ok(match (maybe_subject, maybe_predicate, maybe_object) { - (Some(subject), Some(predicate), Some(object)) => QueryNode::TriplePattern { - subject, - predicate, - object, - }, - _ => QueryNode::Noop { bound_variables }, - }) - } - - fn recover_ns_not_found_pattern_res( - pattern_res: StdResult>, - bound_variables: &mut Vec, - ) -> StdResult>> { - Ok(match pattern_res { - 
Ok(value) => { - value.lookup_bound_variable(&mut |v| bound_variables.push(v)); - Some(value) - } - Err(err) if NamespaceQuerier::is_ns_not_found_error(&err) => None, - _ => Some(pattern_res?), - }) - } - - fn build_subject_pattern(&mut self, value: VarOrNode) -> StdResult> { - Ok(match value { - VarOrNode::Variable(v) => PatternValue::Variable(self.resolve_basic_variable(v)), - VarOrNode::Node(Node::BlankNode(b)) => { - PatternValue::BlankVariable(self.resolve_blank_variable(b)) - } - VarOrNode::Node(Node::NamedNode(iri)) => PatternValue::Constant(Subject::Named( - iri_as_node(&mut self.ns_resolver, self.prefixes, iri)?, - )), - }) - } - - fn build_predicate_pattern( - &mut self, - value: VarOrNamedNode, - ) -> StdResult> { - Ok(match value { - VarOrNamedNode::Variable(v) => PatternValue::Variable(self.resolve_basic_variable(v)), - VarOrNamedNode::NamedNode(iri) => { - PatternValue::Constant(iri_as_node(&mut self.ns_resolver, self.prefixes, iri)?) - } - }) - } - - fn build_object_pattern( - &mut self, - value: VarOrNodeOrLiteral, - ) -> StdResult> { - Ok(match value { - VarOrNodeOrLiteral::Variable(v) => { - PatternValue::Variable(self.resolve_basic_variable(v)) - } - VarOrNodeOrLiteral::Node(Node::BlankNode(b)) => { - PatternValue::BlankVariable(self.resolve_blank_variable(b)) - } - VarOrNodeOrLiteral::Node(Node::NamedNode(iri)) => PatternValue::Constant( - Object::Named(iri_as_node(&mut self.ns_resolver, self.prefixes, iri)?), - ), - VarOrNodeOrLiteral::Literal(l) => { - PatternValue::Constant(literal_as_object(&mut self.ns_resolver, self.prefixes, l)?) 
- } - }) - } - - fn resolve_basic_variable(&mut self, v: String) -> usize { - if let Some(index) = self.variables.iter().position(|var| match var { - PlanVariable::Basic(name) => name == &v, - PlanVariable::BlankNode(_) => false, - }) { - return index; - } - - self.variables.push(PlanVariable::Basic(v)); - self.variables.len() - 1 - } - - fn resolve_blank_variable(&mut self, v: String) -> usize { - if let Some(index) = self.variables.iter().position(|var| match var { - PlanVariable::BlankNode(name) => name == &v, - PlanVariable::Basic(_) => false, - }) { - return index; - } - - self.variables.push(PlanVariable::BlankNode(v)); - self.variables.len() - 1 - } -} - -impl<'a> HasCachedNamespaces for PlanBuilder<'a> { - fn cached_namespaces(&self) -> Vec { - self.ns_resolver.cached_namespaces() - } - - fn clear_cache(&mut self) { - self.ns_resolver.clear_cache(); - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::parser::{Literal, Node, Prefix, IRI}; - use crate::rdf::PrefixMap; - use crate::state; - use crate::state::{namespaces, Namespace}; - use cosmwasm_std::testing::mock_dependencies; - - #[test] - fn proper_initialization() { - let cases = vec![ - (vec![], HashMap::new()), - ( - vec![ - Prefix { - prefix: "owl".to_string(), - namespace: "http://www.w3.org/2002/07/owl#".to_string(), - }, - Prefix { - prefix: "rdf".to_string(), - namespace: "http://www.w3.org/1999/02/22-rdf-syntax-ns#".to_string(), - }, - ], - HashMap::from([ - ( - "owl".to_string(), - "http://www.w3.org/2002/07/owl#".to_string(), - ), - ( - "rdf".to_string(), - "http://www.w3.org/1999/02/22-rdf-syntax-ns#".to_string(), - ), - ]), - ), - ( - vec![ - Prefix { - prefix: "owl".to_string(), - namespace: "http://www.w3.org/2002/07/owl-will-be-overwritten#".to_string(), - }, - Prefix { - prefix: "owl".to_string(), - namespace: "http://www.w3.org/2002/07/owl#".to_string(), - }, - Prefix { - prefix: "rdf".to_string(), - namespace: "http://www.w3.org/1999/02/22-rdf-syntax-ns#".to_string(), - }, - 
], - HashMap::from([ - ( - "owl".to_string(), - "http://www.w3.org/2002/07/owl#".to_string(), - ), - ( - "rdf".to_string(), - "http://www.w3.org/1999/02/22-rdf-syntax-ns#".to_string(), - ), - ]), - ), - ]; - let deps = mock_dependencies(); - - for case in cases { - let prefixes = &PrefixMap::from(case.0).into_inner(); - let builder = PlanBuilder::new(&deps.storage, prefixes, None); - assert_eq!(builder.skip, None); - assert_eq!(builder.limit, None); - assert_eq!(builder.variables, Vec::::new()); - assert_eq!(builder.prefixes, &case.1); - } - - let prefixes = &PrefixMap::default().into_inner(); - let mut builder = PlanBuilder::new(&deps.storage, prefixes, None); - builder = builder.with_skip(20usize).with_limit(50usize); - assert_eq!(builder.skip, Some(20usize)); - assert_eq!(builder.limit, Some(50usize)); - - builder = builder.with_skip(100usize).with_limit(5usize); - assert_eq!(builder.skip, Some(100usize)); - assert_eq!(builder.limit, Some(5usize)); - } - - #[test] - fn build_triple_pattern() { - let cases = vec![ - ( - TriplePattern { - subject: VarOrNode::Variable("s".to_string()), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }, - Ok(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(2usize), - }), - ), - ( - TriplePattern { - subject: VarOrNode::Node(Node::BlankNode("1".to_string())), - predicate: VarOrNamedNode::NamedNode(IRI::Full( - "http://axone.space/hasTitle".to_string(), - )), - object: VarOrNodeOrLiteral::Node(Node::BlankNode("2".to_string())), - }, - Ok(QueryNode::TriplePattern { - subject: PatternValue::BlankVariable(0usize), - predicate: PatternValue::Constant(state::Node { - namespace: 0u128, - value: "hasTitle".to_string(), - }), - object: PatternValue::BlankVariable(1usize), - }), - ), - ( - TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(IRI::Full( - 
"http://axone.space/123456789".to_string(), - ))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Node(Node::NamedNode(IRI::Full( - "http://axone.space/1234567892".to_string(), - ))), - }, - Ok(QueryNode::TriplePattern { - subject: PatternValue::Constant(Subject::Named(state::Node { - namespace: 0u128, - value: "123456789".to_string(), - })), - predicate: PatternValue::Variable(0usize), - object: PatternValue::Constant(Object::Named(state::Node { - namespace: 0u128, - value: "1234567892".to_string(), - })), - }), - ), - ( - TriplePattern { - subject: VarOrNode::Variable("p".to_string()), - predicate: VarOrNamedNode::Variable("s".to_string()), - object: VarOrNodeOrLiteral::Literal(Literal::Simple("simple".to_string())), - }, - Ok(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Constant(Object::Literal(state::Literal::Simple { - value: "simple".to_string(), - })), - }), - ), - ( - TriplePattern { - subject: VarOrNode::Variable("s".to_string()), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Literal(Literal::LanguageTaggedString { - value: "tagged".to_string(), - language: "en".to_string(), - }), - }, - Ok(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Constant(Object::Literal(state::Literal::I18NString { - value: "tagged".to_string(), - language: "en".to_string(), - })), - }), - ), - ( - TriplePattern { - subject: VarOrNode::Variable("s".to_string()), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Literal(Literal::TypedValue { - value: "typed".to_string(), - datatype: IRI::Full("http://axone.space/type".to_string()), - }), - }, - Ok(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: 
PatternValue::Constant(Object::Literal(state::Literal::Typed { - value: "typed".to_string(), - datatype: state::Node { - namespace: 0u128, - value: "type".to_string(), - }, - })), - }), - ), - ( - TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(IRI::Full( - "notexisting#outch".to_string(), - ))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }, - Ok(QueryNode::Noop { - bound_variables: vec![0usize, 1usize], - }), - ), - ( - TriplePattern { - subject: VarOrNode::Variable("s".to_string()), - predicate: VarOrNamedNode::NamedNode(IRI::Full( - "notexisting#outch".to_string(), - )), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }, - Ok(QueryNode::Noop { - bound_variables: vec![0usize, 1usize], - }), - ), - ( - TriplePattern { - subject: VarOrNode::Variable("s".to_string()), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Node(Node::NamedNode(IRI::Full( - "notexisting#outch".to_string(), - ))), - }, - Ok(QueryNode::Noop { - bound_variables: vec![0usize, 1usize], - }), - ), - ]; - - let mut deps = mock_dependencies(); - - namespaces() - .save( - deps.as_mut().storage, - "http://axone.space/".to_string(), - &Namespace { - value: "http://axone.space/".to_string(), - key: 0u128, - counter: 1u128, - }, - ) - .unwrap(); - for case in cases { - let prefixes = &PrefixMap::default().into_inner(); - let mut builder = PlanBuilder::new(&deps.storage, prefixes, None); - - assert_eq!(builder.build_triple_pattern(&case.0), case.1); - } - } - - #[test] - fn build_bgp() { - let cases = vec![ - ( - vec![], - Ok(QueryNode::Noop { - bound_variables: vec![], - }), - ), - ( - vec![TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(IRI::Full( - "notexisting#outch".to_string(), - ))), - predicate: VarOrNamedNode::Variable("predicate".to_string()), - object: VarOrNodeOrLiteral::Variable("object".to_string()), - }], - Ok(QueryNode::Noop { - bound_variables: 
vec![0usize, 1usize], - }), - ), - ( - vec![TriplePattern { - subject: VarOrNode::Variable("subject".to_string()), - predicate: VarOrNamedNode::Variable("predicate".to_string()), - object: VarOrNodeOrLiteral::Variable("object".to_string()), - }], - Ok(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(2usize), - }), - ), - ( - vec![TriplePattern { - subject: VarOrNode::Variable("subject".to_string()), - predicate: VarOrNamedNode::Variable("n".to_string()), - object: VarOrNodeOrLiteral::Variable("n".to_string()), - }], - Ok(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(1usize), - }), - ), - ( - vec![ - TriplePattern { - subject: VarOrNode::Variable("var1".to_string()), - predicate: VarOrNamedNode::Variable("var2".to_string()), - object: VarOrNodeOrLiteral::Variable("var3".to_string()), - }, - TriplePattern { - subject: VarOrNode::Variable("var4".to_string()), - predicate: VarOrNamedNode::Variable("var5".to_string()), - object: VarOrNodeOrLiteral::Variable("var6".to_string()), - }, - TriplePattern { - subject: VarOrNode::Variable("var1".to_string()), - predicate: VarOrNamedNode::Variable("var5".to_string()), - object: VarOrNodeOrLiteral::Node(Node::BlankNode("blank".to_string())), - }, - ], - Ok(QueryNode::ForLoopJoin { - left: Box::new(QueryNode::CartesianProductJoin { - left: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(2usize), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(3usize), - predicate: PatternValue::Variable(4usize), - object: PatternValue::Variable(5usize), - }), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(4usize), - object: 
PatternValue::BlankVariable(6usize), - }), - }), - ), - ( - vec![ - TriplePattern { - subject: VarOrNode::Node(Node::BlankNode("1".to_string())), - predicate: VarOrNamedNode::Variable("1".to_string()), - object: VarOrNodeOrLiteral::Node(Node::BlankNode("2".to_string())), - }, - TriplePattern { - subject: VarOrNode::Node(Node::BlankNode("1".to_string())), - predicate: VarOrNamedNode::Variable("1".to_string()), - object: VarOrNodeOrLiteral::Variable("2".to_string()), - }, - ], - Ok(QueryNode::ForLoopJoin { - left: Box::new(QueryNode::TriplePattern { - subject: PatternValue::BlankVariable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::BlankVariable(2usize), - }), - right: Box::new(QueryNode::TriplePattern { - subject: PatternValue::BlankVariable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(3usize), - }), - }), - ), - ]; - - let mut deps = mock_dependencies(); - namespaces() - .save( - deps.as_mut().storage, - "http://axone.space/".to_string(), - &Namespace { - value: "http://axone.space/".to_string(), - key: 0u128, - counter: 1u128, - }, - ) - .unwrap(); - - for case in cases { - let prefixes = &PrefixMap::default().into_inner(); - let mut builder = PlanBuilder::new(&deps.storage, prefixes, None); - - assert_eq!(builder.build_from_bgp(case.0.iter()), case.1) - } - } - - #[test] - fn build_expression() { - let cases = vec![ - ( - parser::Expression::NamedNode(IRI::Full("http://axone.space/test".to_string())), - Ok(Expression::Constant(Term::String( - "http://axone.space/test".to_string(), - ))), - ), - ( - parser::Expression::NamedNode(IRI::Prefixed("oups:test".to_string())), - Err(StdError::generic_err("Prefix not found: oups")), - ), - ( - parser::Expression::Literal(Literal::Simple("simple".to_string())), - Ok(Expression::Constant(Term::String("simple".to_string()))), - ), - ( - parser::Expression::Literal(Literal::TypedValue { - value: "typed".to_string(), - datatype: 
IRI::Prefixed("oups:type".to_string()), - }), - Err(StdError::generic_err("Prefix not found: oups")), - ), - ( - parser::Expression::Variable("variable".to_string()), - Ok(Expression::Variable(0usize)), - ), - ( - parser::Expression::And(vec![parser::Expression::Variable("variable".to_string())]), - Ok(Expression::And(vec![Expression::Variable(0usize)])), - ), - ( - parser::Expression::Or(vec![parser::Expression::Variable("variable".to_string())]), - Ok(Expression::Or(vec![Expression::Variable(0usize)])), - ), - ( - parser::Expression::Equal( - Box::new(parser::Expression::Variable("v1".to_string())), - Box::new(parser::Expression::Variable("v2".to_string())), - ), - Ok(Expression::Equal( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Variable(1usize)), - )), - ), - ( - parser::Expression::Greater( - Box::new(parser::Expression::Variable("v1".to_string())), - Box::new(parser::Expression::Variable("v2".to_string())), - ), - Ok(Expression::Greater( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Variable(1usize)), - )), - ), - ( - parser::Expression::GreaterOrEqual( - Box::new(parser::Expression::Variable("v1".to_string())), - Box::new(parser::Expression::Variable("v2".to_string())), - ), - Ok(Expression::GreaterOrEqual( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Variable(1usize)), - )), - ), - ( - parser::Expression::Less( - Box::new(parser::Expression::Variable("v1".to_string())), - Box::new(parser::Expression::Variable("v2".to_string())), - ), - Ok(Expression::Less( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Variable(1usize)), - )), - ), - ( - parser::Expression::LessOrEqual( - Box::new(parser::Expression::Variable("v1".to_string())), - Box::new(parser::Expression::Variable("v2".to_string())), - ), - Ok(Expression::LessOrEqual( - Box::new(Expression::Variable(0usize)), - Box::new(Expression::Variable(1usize)), - )), - ), - ( - 
parser::Expression::Not(Box::new(parser::Expression::Variable("v1".to_string()))), - Ok(Expression::Not(Box::new(Expression::Variable(0usize)))), - ), - ]; - - let deps = mock_dependencies(); - - for case in cases { - let prefixes = &PrefixMap::default().into_inner(); - let mut builder = PlanBuilder::new(&deps.storage, prefixes, None); - - assert_eq!(builder.build_expression(&case.0), case.1) - } - } - - #[test] - fn build_plan() { - let cases = vec![ - ( - None, - None, - WhereClause::Bgp { patterns: vec![] }, - Ok(QueryPlan { - entrypoint: QueryNode::Noop { - bound_variables: vec![], - }, - variables: vec![], - }), - ), - ( - Some(10usize), - None, - WhereClause::Bgp { patterns: vec![] }, - Ok(QueryPlan { - entrypoint: QueryNode::Skip { - child: Box::new(QueryNode::Noop { - bound_variables: vec![], - }), - first: 10usize, - }, - variables: vec![], - }), - ), - ( - None, - Some(10usize), - WhereClause::Bgp { patterns: vec![] }, - Ok(QueryPlan { - entrypoint: QueryNode::Limit { - child: Box::new(QueryNode::Noop { - bound_variables: vec![], - }), - first: 10usize, - }, - variables: vec![], - }), - ), - ( - Some(10usize), - Some(20usize), - WhereClause::Bgp { patterns: vec![] }, - Ok(QueryPlan { - entrypoint: QueryNode::Limit { - child: Box::new(QueryNode::Skip { - child: Box::new(QueryNode::Noop { - bound_variables: vec![], - }), - first: 10usize, - }), - first: 20usize, - }, - variables: vec![], - }), - ), - ( - None, - None, - WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Variable("subject".to_string()), - predicate: VarOrNamedNode::Variable("predicate".to_string()), - object: VarOrNodeOrLiteral::Variable("object".to_string()), - }], - }, - Ok(QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(2usize), - }, - variables: vec![ - PlanVariable::Basic("subject".to_string()), - 
PlanVariable::Basic("predicate".to_string()), - PlanVariable::Basic("object".to_string()), - ], - }), - ), - ( - None, - None, - WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Variable("subject".to_string()), - predicate: VarOrNamedNode::Variable("n".to_string()), - object: VarOrNodeOrLiteral::Variable("n".to_string()), - }], - }, - Ok(QueryPlan { - entrypoint: QueryNode::TriplePattern { - subject: PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(1usize), - }, - variables: vec![ - PlanVariable::Basic("subject".to_string()), - PlanVariable::Basic("n".to_string()), - ], - }), - ), - ( - None, - None, - WhereClause::LateralJoin { - left: Box::new(WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(Node::BlankNode("1".to_string())), - predicate: VarOrNamedNode::Variable("n".to_string()), - object: VarOrNodeOrLiteral::Node(Node::BlankNode("2".to_string())), - }], - }), - right: Box::new(WhereClause::Bgp { patterns: vec![] }), - }, - Ok(QueryPlan { - entrypoint: QueryNode::ForLoopJoin { - left: Box::new(QueryNode::TriplePattern { - subject: PatternValue::BlankVariable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::BlankVariable(2usize), - }), - right: Box::new(QueryNode::Noop { - bound_variables: vec![], - }), - }, - variables: vec![ - PlanVariable::BlankNode("1".to_string()), - PlanVariable::Basic("n".to_string()), - PlanVariable::BlankNode("2".to_string()), - ], - }), - ), - ( - None, - None, - WhereClause::Filter { - inner: Box::new(WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Variable("1".to_string()), - predicate: VarOrNamedNode::Variable("2".to_string()), - object: VarOrNodeOrLiteral::Variable("2".to_string()), - }], - }), - expr: parser::Expression::Variable("1".to_string()), - }, - Ok(QueryPlan { - entrypoint: QueryNode::Filter { - inner: Box::new(QueryNode::TriplePattern { - subject: 
PatternValue::Variable(0usize), - predicate: PatternValue::Variable(1usize), - object: PatternValue::Variable(1usize), - }), - expr: Expression::Variable(0usize), - }, - variables: vec![ - PlanVariable::Basic("1".to_string()), - PlanVariable::Basic("2".to_string()), - ], - }), - ), - ( - None, - None, - WhereClause::Filter { - inner: Box::new(WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Variable("1".to_string()), - predicate: VarOrNamedNode::Variable("2".to_string()), - object: VarOrNodeOrLiteral::Variable("2".to_string()), - }], - }), - expr: parser::Expression::Variable("oups".to_string()), - }, - Err(StdError::generic_err( - "Unbound variable in filter expression", - )), - ), - ]; - - let mut deps = mock_dependencies(); - namespaces() - .save( - deps.as_mut().storage, - "http://axone.space/".to_string(), - &Namespace { - value: "http://axone.space/".to_string(), - key: 0u128, - counter: 1u128, - }, - ) - .unwrap(); - - for case in cases { - let prefixes = &PrefixMap::default().into_inner(); - let mut builder = PlanBuilder::new(&deps.storage, prefixes, None); - if let Some(skip) = case.0 { - builder = builder.with_skip(skip); - } - if let Some(limit) = case.1 { - builder = builder.with_limit(limit); - } - - assert_eq!(builder.build_plan(&case.2), case.3) - } - } -} diff --git a/contracts/axone-cognitarium/src/querier/variable.rs b/contracts/axone-cognitarium/src/querier/variable.rs deleted file mode 100644 index 6325b04d..00000000 --- a/contracts/axone-cognitarium/src/querier/variable.rs +++ /dev/null @@ -1,496 +0,0 @@ -use crate::parser::{Value, IRI}; -use crate::querier::expression::Term; -use crate::state::{Literal, NamespaceSolver, Object, Predicate, Subject}; -use axone_rdf::normalize::IdentifierIssuer; -use cosmwasm_std::StdResult; -use std::collections::BTreeSet; - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum ResolvedVariable { - Subject(Subject), - Predicate(Predicate), - Object(Object), -} - -impl ResolvedVariable { - 
pub fn as_subject(&self) -> Option { - Some(match self { - ResolvedVariable::Subject(s) => s.clone(), - ResolvedVariable::Predicate(p) => Subject::Named(p.clone()), - ResolvedVariable::Object(o) => match o { - Object::Named(node) => Subject::Named(node.clone()), - Object::Blank(node) => Subject::Blank(*node), - Object::Literal(_) => None?, - }, - }) - } - - pub fn as_predicate(&self) -> Option { - Some(match self { - ResolvedVariable::Subject(s) => match s { - Subject::Named(node) => node.clone(), - Subject::Blank(_) => None?, - }, - ResolvedVariable::Predicate(p) => p.clone(), - ResolvedVariable::Object(o) => match o { - Object::Named(node) => node.clone(), - Object::Blank(_) | Object::Literal(_) => None?, - }, - }) - } - - #[allow(clippy::unnecessary_wraps)] - pub fn as_object(&self) -> Option { - Some(match self { - ResolvedVariable::Subject(s) => match s { - Subject::Named(node) => Object::Named(node.clone()), - Subject::Blank(node) => Object::Blank(*node), - }, - ResolvedVariable::Predicate(p) => Object::Named(p.clone()), - ResolvedVariable::Object(o) => o.clone(), - }) - } - - pub fn as_value( - &self, - ns_fn: &mut dyn NamespaceSolver, - id_issuer: &mut IdentifierIssuer, - ) -> StdResult { - Ok(match self { - ResolvedVariable::Subject(subject) => match subject { - Subject::Named(named) => named.as_iri(ns_fn).map(|iri| Value::URI { - value: IRI::Full(iri), - })?, - Subject::Blank(blank) => Value::BlankNode { - value: id_issuer.get_str_or_issue(blank.to_string()).to_string(), - }, - }, - ResolvedVariable::Predicate(predicate) => { - predicate.as_iri(ns_fn).map(|iri| Value::URI { - value: IRI::Full(iri), - })? 
- } - ResolvedVariable::Object(object) => match object { - Object::Named(named) => Value::URI { - value: IRI::Full(named.as_iri(ns_fn)?), - }, - Object::Blank(blank) => Value::BlankNode { - value: id_issuer.get_str_or_issue(blank.to_string()).to_string(), - }, - Object::Literal(literal) => match literal { - Literal::Simple { value } => Value::Literal { - value: value.clone(), - lang: None, - datatype: None, - }, - Literal::I18NString { value, language } => Value::Literal { - value: value.clone(), - lang: Some(language.clone()), - datatype: None, - }, - Literal::Typed { value, datatype } => Value::Literal { - value: value.clone(), - lang: None, - datatype: Some(datatype.as_iri(ns_fn).map(IRI::Full)?), - }, - }, - }, - }) - } - - pub fn as_term(&self, ns_solver: &mut dyn NamespaceSolver) -> StdResult { - Ok(match self { - ResolvedVariable::Subject(subject) => match subject { - Subject::Named(named) => named.as_iri(ns_solver).map(Term::String)?, - Subject::Blank(blank) => Term::String(format!("_:{}", blank)), - }, - ResolvedVariable::Predicate(predicate) => { - predicate.as_iri(ns_solver).map(Term::String)? - } - ResolvedVariable::Object(object) => match object { - Object::Named(named) => named.as_iri(ns_solver).map(Term::String)?, - Object::Blank(blank) => Term::String(format!("_:{}", blank)), - Object::Literal(literal) => Term::String(match literal { - Literal::Simple { value } => value.clone(), - Literal::I18NString { value, language } => { - format!("{}{}", value, language) - } - Literal::Typed { value, datatype } => { - format!("{}{}", value, datatype.as_iri(ns_solver)?) 
- } - }), - }, - }) - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct ResolvedVariables { - variables: Vec>, -} - -impl ResolvedVariables { - pub fn with_capacity(cap: usize) -> Self { - let mut variables = Vec::with_capacity(cap); - for i in 0..cap { - variables.insert(i, None); - } - - Self { variables } - } - - /// Merge with another set of resolved variables, returns None if a variable is set on both side - /// with different values. - pub fn merge_with(&self, other: &Self) -> Option { - let mut merged = other.variables.clone(); - - for (key, var) in self.variables.iter().enumerate() { - if let Some(val) = var { - match &other.variables[key] { - Some(other_val) => { - if val != other_val { - return None; - } - } - None => merged[key] = Some(val.clone()), - } - } - } - - Some(Self { variables: merged }) - } - - pub fn merge_index(&mut self, index: usize, var: ResolvedVariable) -> Option<()> { - if let Some(old) = self.get(index) { - (*old == var).then_some(()) - } else { - self.variables[index] = Some(var); - Some(()) - } - } - - pub fn get(&self, index: usize) -> &Option { - self.variables.get(index).unwrap_or(&None) - } -} - -pub trait HasBoundVariables { - fn bound_variables(&self) -> BTreeSet { - let mut vars = BTreeSet::new(); - self.lookup_bound_variables(&mut |v| { - vars.insert(v); - }); - vars - } - - fn lookup_bound_variables(&self, callback: &mut impl FnMut(usize)); -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::state::{InMemoryNamespaceSolver, Literal, Node}; - use cosmwasm_std::StdError; - - #[test] - fn conversions() { - let cases: Vec<(Option, Option, Option)> = vec![ - ( - Some(Subject::Blank(0u128)), - None, - Some(Object::Blank(0u128)), - ), - ( - Some(Subject::Named(Node { - namespace: 4, - value: "test".to_string(), - })), - Some(Node { - namespace: 4, - value: "test".to_string(), - }), - Some(Object::Named(Node { - namespace: 4, - value: "test".to_string(), - })), - ), - ( - None, - None, - 
Some(Object::Literal(Literal::I18NString { - value: "test".to_string(), - language: "en".to_string(), - })), - ), - ]; - - for (s, p, o) in cases { - if let Some(ref subject) = s { - let subject = ResolvedVariable::Subject(subject.clone()); - assert_eq!(subject.as_subject(), s); - assert_eq!(subject.as_predicate(), p); - assert_eq!(subject.as_object(), o); - } - if let Some(ref predicate) = p { - let predicate = ResolvedVariable::Predicate(predicate.clone()); - assert_eq!(predicate.as_subject(), s); - assert_eq!(predicate.as_predicate(), p); - assert_eq!(predicate.as_object(), o); - } - if let Some(ref object) = o { - let object = ResolvedVariable::Object(object.clone()); - assert_eq!(object.as_subject(), s); - assert_eq!(object.as_predicate(), p); - assert_eq!(object.as_object(), o); - } - } - } - - #[test] - fn values() { - let cases = vec![ - ( - ResolvedVariable::Subject(Subject::Named(Node { - namespace: 0, - value: "bar".to_string(), - })), - Ok(Value::URI { - value: IRI::Full("foobar".to_string()), - }), - ), - ( - ResolvedVariable::Subject(Subject::Blank(0u128)), - Ok(Value::BlankNode { - value: "b0".to_string(), - }), - ), - ( - ResolvedVariable::Predicate(Node { - namespace: 1, - value: "foo".to_string(), - }), - Ok(Value::URI { - value: IRI::Full("barfoo".to_string()), - }), - ), - ( - ResolvedVariable::Object(Object::Named(Node { - namespace: 1, - value: "foo".to_string(), - })), - Ok(Value::URI { - value: IRI::Full("barfoo".to_string()), - }), - ), - ( - ResolvedVariable::Object(Object::Blank(0u128)), - Ok(Value::BlankNode { - value: "b0".to_string(), - }), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::Simple { - value: "foo".to_string(), - })), - Ok(Value::Literal { - value: "foo".to_string(), - lang: None, - datatype: None, - }), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::I18NString { - value: "foo".to_string(), - language: "fr".to_string(), - })), - Ok(Value::Literal { - value: "foo".to_string(), - lang: 
Some("fr".to_string()), - datatype: None, - }), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::Typed { - value: "foo".to_string(), - datatype: Node { - namespace: 0, - value: "bar".to_string(), - }, - })), - Ok(Value::Literal { - value: "foo".to_string(), - lang: None, - datatype: Some(IRI::Full("foobar".to_string())), - }), - ), - ( - ResolvedVariable::Subject(Subject::Named(Node { - namespace: 12, - value: "unknown".to_string(), - })), - Err(StdError::not_found("Namespace")), - ), - ( - ResolvedVariable::Predicate(Node { - namespace: 12, - value: "unknown".to_string(), - }), - Err(StdError::not_found("Namespace")), - ), - ( - ResolvedVariable::Object(Object::Named(Node { - namespace: 12, - value: "unknown".to_string(), - })), - Err(StdError::not_found("Namespace")), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::Typed { - datatype: Node { - namespace: 12, - value: "unknown".to_string(), - }, - value: "unknown".to_string(), - })), - Err(StdError::not_found("Namespace")), - ), - ]; - - let mut id_issuer = IdentifierIssuer::new("b", 0u128); - let mut ns_solver = InMemoryNamespaceSolver::with(vec![(0, "foo"), (1, "bar")]); - for (var, expected) in cases { - assert_eq!(var.as_value(&mut ns_solver, &mut id_issuer), expected) - } - } - - #[test] - fn merged_variables() { - let mut vars1 = ResolvedVariables::with_capacity(3); - vars1.merge_index(0, ResolvedVariable::Object(Object::Blank(0u128))); - vars1.merge_index(2, ResolvedVariable::Object(Object::Blank(1u128))); - - let mut vars2 = ResolvedVariables::with_capacity(3); - vars2.merge_index(1, ResolvedVariable::Object(Object::Blank(2u128))); - vars2.merge_index(2, ResolvedVariable::Object(Object::Blank(1u128))); - - assert_eq!( - vars2.get(1), - &Some(ResolvedVariable::Object(Object::Blank(2u128))) - ); - assert_eq!(vars1.get(1), &None); - - let mut expected_result = ResolvedVariables::with_capacity(3); - expected_result.merge_index(0, ResolvedVariable::Object(Object::Blank(0u128))); - 
expected_result.merge_index(1, ResolvedVariable::Object(Object::Blank(2u128))); - expected_result.merge_index(2, ResolvedVariable::Object(Object::Blank(1u128))); - - let result = vars1.merge_with(&vars2); - assert_eq!(result, Some(expected_result)); - - let mut vars3 = ResolvedVariables::with_capacity(3); - vars3.merge_index(1, ResolvedVariable::Object(Object::Blank(2u128))); - vars3.merge_index( - 2, - ResolvedVariable::Predicate(Node { - namespace: 0, - value: "".to_string(), - }), - ); - let result2 = vars1.merge_with(&vars3); - assert_eq!(result2, None); - } - - #[test] - fn terms() { - let cases = vec![ - ( - ResolvedVariable::Subject(Subject::Named(Node { - namespace: 0, - value: "bar".to_string(), - })), - Ok(Term::String("foobar".to_string())), - ), - ( - ResolvedVariable::Subject(Subject::Blank(0u128)), - Ok(Term::String("_:0".to_string())), - ), - ( - ResolvedVariable::Predicate(Node { - namespace: 1, - value: "foo".to_string(), - }), - Ok(Term::String("barfoo".to_string())), - ), - ( - ResolvedVariable::Object(Object::Named(Node { - namespace: 1, - value: "foo".to_string(), - })), - Ok(Term::String("barfoo".to_string())), - ), - ( - ResolvedVariable::Object(Object::Blank(0u128)), - Ok(Term::String("_:0".to_string())), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::Simple { - value: "foo".to_string(), - })), - Ok(Term::String("foo".to_string())), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::I18NString { - value: "foo".to_string(), - language: "fr".to_string(), - })), - Ok(Term::String("foofr".to_string())), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::Typed { - value: "foo".to_string(), - datatype: Node { - namespace: 0, - value: "bar".to_string(), - }, - })), - Ok(Term::String("foofoobar".to_string())), - ), - ( - ResolvedVariable::Subject(Subject::Named(Node { - namespace: 12, - value: "unknown".to_string(), - })), - Err(StdError::not_found("Namespace")), - ), - ( - ResolvedVariable::Predicate(Node { - 
namespace: 12, - value: "unknown".to_string(), - }), - Err(StdError::not_found("Namespace")), - ), - ( - ResolvedVariable::Object(Object::Named(Node { - namespace: 12, - value: "unknown".to_string(), - })), - Err(StdError::not_found("Namespace")), - ), - ( - ResolvedVariable::Object(Object::Literal(Literal::Typed { - datatype: Node { - namespace: 12, - value: "unknown".to_string(), - }, - value: "unknown".to_string(), - })), - Err(StdError::not_found("Namespace")), - ), - ]; - - let mut ns_solver = InMemoryNamespaceSolver::with(vec![(0, "foo"), (1, "bar")]); - for (var, expected) in cases { - assert_eq!(var.as_term(&mut ns_solver), expected) - } - } -} diff --git a/contracts/axone-cognitarium/src/rdf/atom.rs b/contracts/axone-cognitarium/src/rdf/atom.rs deleted file mode 100644 index f66e6e2f..00000000 --- a/contracts/axone-cognitarium/src/rdf/atom.rs +++ /dev/null @@ -1,147 +0,0 @@ -use rio_api::model::{Literal, NamedNode, Triple}; -use std::fmt; - -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub enum Subject { - NamedNode(String), - BlankNode(String), -} - -impl fmt::Display for Subject { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Subject::NamedNode(s) | Subject::BlankNode(s) => write!(f, "{s}"), - } - } -} - -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct Property(pub String); - -impl fmt::Display for Property { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub enum Value { - NamedNode(String), - BlankNode(String), - LiteralSimple(String), - LiteralLang(String, String), - LiteralDatatype(String, String), -} - -impl fmt::Display for Value { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Value::NamedNode(s) | Value::BlankNode(s) | Value::LiteralSimple(s) => write!(f, "{s}"), - Value::LiteralLang(s, l) => write!(f, "{s}@{l}"), - Value::LiteralDatatype(s, d) => write!(f, "{s}^^{d}"), - } - } -} 
- -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct Atom { - pub subject: Subject, - pub property: Property, - pub value: Value, -} - -impl fmt::Display for Atom { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "<{}> <{}> '{}'", self.subject, self.property, self.value) - } -} - -impl<'a> From<&'a Atom> for Triple<'a> { - fn from(atom: &'a Atom) -> Self { - Triple { - subject: match &atom.subject { - Subject::NamedNode(s) | Subject::BlankNode(s) => NamedNode { iri: s.as_str() }, - } - .into(), - predicate: NamedNode { - iri: &atom.property.0, - }, - object: match &atom.value { - Value::NamedNode(s) | Value::BlankNode(s) => NamedNode { iri: s.as_str() }.into(), - Value::LiteralSimple(s) => Literal::Simple { value: s.as_str() }.into(), - Value::LiteralLang(s, l) => Literal::LanguageTaggedString { - value: s, - language: l, - } - .into(), - Value::LiteralDatatype(s, d) => Literal::Typed { - value: s, - datatype: NamedNode { iri: d }, - } - .into(), - }, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn proper_display() { - struct TC<'a> { - input: Box, - expected: String, - } - let cases = vec![ - // # Subject - TC { - input: Box::new(Subject::BlankNode("blank".into())), - expected: "blank".into(), - }, - TC { - input: Box::new(Subject::NamedNode("named".into())), - expected: "named".into(), - }, - // # Property - TC { - input: Box::new(Property("foo".into())), - expected: "foo".into(), - }, - // # Value - TC { - input: Box::new(Value::NamedNode("named".into())), - expected: "named".into(), - }, - TC { - input: Box::new(Value::BlankNode("blank".into())), - expected: "blank".into(), - }, - TC { - input: Box::new(Value::LiteralSimple("simple".into())), - expected: "simple".into(), - }, - TC { - input: Box::new(Value::LiteralLang("lang".into(), "en".into())), - expected: "lang@en".into(), - }, - TC { - input: Box::new(Value::LiteralDatatype("data".into(), "uri".into())), - expected: "data^^uri".into(), - }, - // # 
Atom - TC { - input: Box::new(Atom { - subject: Subject::NamedNode("subject".into()), - property: Property("predicate".into()), - value: Value::LiteralLang("object".into(), "en".into()), - }), - expected: " 'object@en'".into(), - }, - ]; - for tc in cases { - assert_eq!(format!("{}", tc.input), tc.expected); - } - } -} diff --git a/contracts/axone-cognitarium/src/rdf/mapper.rs b/contracts/axone-cognitarium/src/rdf/mapper.rs deleted file mode 100644 index 2a4092f6..00000000 --- a/contracts/axone-cognitarium/src/rdf/mapper.rs +++ /dev/null @@ -1,95 +0,0 @@ -use crate::parser; -use crate::rdf::{Property, Subject, Value}; -use axone_rdf::uri::expand_uri; -use cosmwasm_std::StdError; -use std::collections::HashMap; - -impl TryFrom<(parser::Node, &HashMap)> for Subject { - type Error = StdError; - - fn try_from( - (node, prefixes): (parser::Node, &HashMap), - ) -> Result { - match node { - parser::Node::BlankNode(id) => Ok(Subject::BlankNode(id)), - parser::Node::NamedNode(parser::IRI::Full(uri)) => Ok(Subject::NamedNode(uri)), - parser::Node::NamedNode(parser::IRI::Prefixed(curie)) => { - Ok(Subject::NamedNode(expand_uri(&curie, prefixes)?)) - } - } - } -} - -impl TryFrom<(parser::IRI, &HashMap)> for Property { - type Error = StdError; - - fn try_from( - (iri, prefixes): (parser::IRI, &HashMap), - ) -> Result { - match iri { - parser::IRI::Full(uri) => Ok(Property(uri)), - parser::IRI::Prefixed(curie) => Ok(Property(expand_uri(&curie, prefixes)?)), - } - } -} - -impl TryFrom<(parser::Node, &HashMap)> for Value { - type Error = StdError; - - fn try_from( - (node, prefixes): (parser::Node, &HashMap), - ) -> Result { - match node { - parser::Node::NamedNode(parser::IRI::Full(uri)) => Ok(Value::NamedNode(uri)), - parser::Node::NamedNode(parser::IRI::Prefixed(curie)) => { - Ok(Value::NamedNode(expand_uri(&curie, prefixes)?)) - } - parser::Node::BlankNode(id) => Ok(Value::BlankNode(id)), - } - } -} - -impl TryFrom<(parser::Literal, &HashMap)> for Value { - type Error = 
StdError; - - fn try_from( - (literal, prefixes): (parser::Literal, &HashMap), - ) -> Result { - match literal { - parser::Literal::Simple(value) => Ok(Value::LiteralSimple(value)), - parser::Literal::LanguageTaggedString { value, language } => { - Ok(Value::LiteralLang(value, language)) - } - parser::Literal::TypedValue { - value, - datatype: parser::IRI::Full(uri), - } => Ok(Value::LiteralDatatype(value, uri)), - parser::Literal::TypedValue { - value, - datatype: parser::IRI::Prefixed(prefix), - } => Ok(Value::LiteralDatatype( - value, - expand_uri(&prefix, prefixes)?, - )), - } - } -} - -#[derive(Default)] -pub struct PrefixMap(HashMap); -impl PrefixMap { - pub fn into_inner(self) -> HashMap { - self.0 - } -} - -impl From> for PrefixMap { - fn from(as_list: Vec) -> Self { - PrefixMap( - as_list - .into_iter() - .map(|prefix| (prefix.prefix, prefix.namespace)) - .collect(), - ) - } -} diff --git a/contracts/axone-cognitarium/src/rdf/mod.rs b/contracts/axone-cognitarium/src/rdf/mod.rs deleted file mode 100644 index 063ead0f..00000000 --- a/contracts/axone-cognitarium/src/rdf/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod atom; -mod mapper; - -pub use self::atom::*; -pub use self::mapper::*; diff --git a/contracts/axone-cognitarium/src/state/blank_nodes.rs b/contracts/axone-cognitarium/src/state/blank_nodes.rs deleted file mode 100644 index 199be8b6..00000000 --- a/contracts/axone-cognitarium/src/state/blank_nodes.rs +++ /dev/null @@ -1,4 +0,0 @@ -use cw_storage_plus::Item; - -/// A counter serving as blank node unique identifier generator. 
-pub const BLANK_NODE_IDENTIFIER_COUNTER: Item = Item::new("blank_node_key"); diff --git a/contracts/axone-cognitarium/src/state/mod.rs b/contracts/axone-cognitarium/src/state/mod.rs deleted file mode 100644 index 3200463d..00000000 --- a/contracts/axone-cognitarium/src/state/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -mod blank_nodes; -mod namespaces; -mod store; -mod triples; - -pub use blank_nodes::*; -pub use namespaces::*; -pub use store::*; -pub use triples::*; - -#[cfg(test)] -mod test_util; -#[cfg(test)] -pub use test_util::*; diff --git a/contracts/axone-cognitarium/src/state/namespaces.rs b/contracts/axone-cognitarium/src/state/namespaces.rs deleted file mode 100644 index d3e982fd..00000000 --- a/contracts/axone-cognitarium/src/state/namespaces.rs +++ /dev/null @@ -1,345 +0,0 @@ -use cosmwasm_std::{StdError, StdResult, Storage}; -use cw_storage_plus::{Index, IndexList, IndexedMap, Item, UniqueIndex}; -use serde::{Deserialize, Serialize}; -use std::cell::RefCell; -use std::collections::BTreeMap; -use std::rc::Rc; - -/// Store a key increment used a unique key for referencing a namespace. Given the size of an `u128` -/// there is no need to implement a garbage collector mechanism in case some namespaces are removed. -pub const NAMESPACE_KEY_INCREMENT: Item = Item::new("namespace_key"); - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Namespace { - /// The namespace value. - pub value: String, - - /// The unique, incremented key issues to reference this namespace from a triple IRI. - pub key: u128, - - /// A reference counter to this namespace. 
- pub counter: u128, -} - -pub struct NamespaceIndexes<'a> { - pub key: UniqueIndex<'a, u128, Namespace, String>, -} - -impl IndexList for NamespaceIndexes<'_> { - fn get_indexes(&self) -> Box> + '_> { - let key: &dyn Index = &self.key; - Box::new(vec![key].into_iter()) - } -} - -pub fn namespaces<'a>() -> IndexedMap> { - IndexedMap::new( - "NAMESPACE", - NamespaceIndexes { - key: UniqueIndex::new(|ns| ns.key, "NAMESPACE__KEY"), - }, - ) -} - -/// [NamespaceQuerier] is a [Namespace] querying service allowing to resolve namespaces either by -/// namespace's value or namespace's internal state key. It implements a two way indexed in-memory -/// cache to mitigate state access. -pub struct NamespaceQuerier { - by_val: BTreeMap>>, - by_key: BTreeMap>>, -} - -impl NamespaceQuerier { - pub fn new() -> Self { - Self { - by_key: BTreeMap::new(), - by_val: BTreeMap::new(), - } - } - - /// Resolve a [Namespace] from its value, returning it from cache in priority before accessing - /// the state. - pub fn resolve_from_val( - &mut self, - storage: &dyn Storage, - value: String, - ) -> StdResult> { - self.resolve_cell_from_val(storage, value) - .map(|maybe_cell| maybe_cell.map(|cell| cell.borrow().clone())) - } - - /// Resolve a [Namespace] from its internal key, returning it from cache in priority before accessing - /// the state. - pub fn resolve_from_key( - &mut self, - storage: &dyn Storage, - key: u128, - ) -> StdResult> { - self.resolve_cell_from_key(storage, key) - .map(|maybe_cell| maybe_cell.map(|cell| cell.borrow().clone())) - } - - /// Resolve a counting reference to a memory location of a cached [Namespace] from its value, - /// returning it from cache in priority before accessing the state. It allows to mutate it in place. 
- fn resolve_cell_from_val( - &mut self, - storage: &dyn Storage, - value: String, - ) -> StdResult>>> { - if let Some(rc) = self.by_val.get(value.as_str()) { - return Ok(Some(rc.clone())); - } - - namespaces() - .may_load(storage, value) - .map(|maybe_ns| maybe_ns.map(|ns| self.insert(ns))) - } - - /// Resolve a counting reference to a memory location of a cached [Namespace] from its key, - /// returning it from cache in priority before accessing the state. It allows to mutate it in place. - fn resolve_cell_from_key( - &mut self, - storage: &dyn Storage, - key: u128, - ) -> StdResult>>> { - if let Some(rc) = self.by_key.get(&key) { - return Ok(Some(rc.clone())); - } - - namespaces() - .idx - .key - .item(storage, key) - .map(|maybe_ns| maybe_ns.map(|ns| self.insert(ns.1))) - } - - /// Cache a namespace by creating a dedicated mutable memory location shared between indexes - /// returning a counted reference to it. - fn insert(&mut self, ns: Namespace) -> Rc> { - let ns_rc = Rc::new(RefCell::new(ns.clone())); - - self.by_val.insert(ns.value, ns_rc.clone()); - self.by_key.insert(ns.key, ns_rc.clone()); - - ns_rc - } - - /// Utility middleware to consider `StdResult::Ok(None)` as `Err(StdError::NotFound)` of namespace. - /// Typically used with [Self::resolve_from_key]. - pub fn none_as_error_middleware(resolve_res: Option) -> StdResult { - match resolve_res { - Some(ns) => Ok(ns), - None => Err(StdError::not_found("Namespace")), - } - } - - pub fn is_ns_not_found_error(err: &StdError) -> bool { - matches!(err, StdError::NotFound { kind, .. } if kind == "Namespace") - } -} - -impl Default for NamespaceQuerier { - fn default() -> Self { - Self::new() - } -} - -/// Used when managing an internal [Namespace] cache to expose it, the purpose is to allow the cache -/// to be reusable. -pub trait HasCachedNamespaces { - /// Return the cached namespaces. - fn cached_namespaces(&self) -> Vec; - - /// Empty the namespace cache. 
- fn clear_cache(&mut self); -} - -impl HasCachedNamespaces for NamespaceQuerier { - fn cached_namespaces(&self) -> Vec { - self.by_key - .iter() - .map(|cell| cell.1.borrow().clone()) - .collect() - } - - fn clear_cache(&mut self) { - self.by_val.clear(); - self.by_key.clear(); - } -} - -impl From> for NamespaceQuerier { - fn from(value: Vec) -> Self { - let mut resolver = NamespaceQuerier::new(); - for ns in value { - resolver.insert(ns); - } - - resolver - } -} - -pub trait NamespaceSolver { - fn resolve_from_key(&mut self, key: u128) -> StdResult; - fn resolve_from_val(&mut self, value: String) -> StdResult; -} - -pub struct NamespaceResolver<'a> { - storage: &'a dyn Storage, - ns_querier: NamespaceQuerier, -} - -impl<'a> NamespaceResolver<'a> { - pub fn new(storage: &'a dyn Storage, ns_cache: Vec) -> Self { - Self { - storage, - ns_querier: ns_cache.into(), - } - } -} - -impl<'a> NamespaceSolver for NamespaceResolver<'a> { - fn resolve_from_key(&mut self, key: u128) -> StdResult { - self.ns_querier - .resolve_from_key(self.storage, key) - .and_then(NamespaceQuerier::none_as_error_middleware) - } - - fn resolve_from_val(&mut self, value: String) -> StdResult { - self.ns_querier - .resolve_from_val(self.storage, value) - .and_then(NamespaceQuerier::none_as_error_middleware) - } -} - -impl<'a> HasCachedNamespaces for NamespaceResolver<'a> { - fn cached_namespaces(&self) -> Vec { - self.ns_querier.cached_namespaces() - } - - fn clear_cache(&mut self) { - self.ns_querier.clear_cache(); - } -} - -/// Allow to batch write operations on [Namespace] taking care of the [NAMESPACE_KEY_INCREMENT], it -/// manages insertions/deletions as well as counting references. It internally use a [NamespaceQuerier] -/// as a cache of new/removed/modified namespaces, to finally apply writing to the state when -/// calling [Self::flush]. 
-pub struct NamespaceBatchService { - ns_resolver: NamespaceQuerier, - ns_key_inc: u128, - ns_count_diff: i128, -} - -impl NamespaceBatchService { - pub fn new(storage: &dyn Storage) -> StdResult { - Ok(Self { - ns_resolver: NamespaceQuerier::new(), - ns_key_inc: NAMESPACE_KEY_INCREMENT.load(storage)?, - ns_count_diff: 0, - }) - } - - /// Resolve a [Namespace] from its internal key, returning it from cache in priority before accessing - /// the state. - pub fn resolve_from_key( - &mut self, - storage: &dyn Storage, - key: u128, - ) -> StdResult> { - self.ns_resolver.resolve_from_key(storage, key) - } - - /// Resolve a namespace by its value, or allocate a new one if not existing. This is applied to - /// the in-memory cache only, [Self::flush] must be called to write the changes to the state. - pub fn resolve_or_allocate( - &mut self, - storage: &dyn Storage, - value: String, - ) -> StdResult { - self.ns_resolver - .resolve_cell_from_val(storage, value.clone()) - .map(|maybe_cell| { - maybe_cell.map_or_else(|| self.allocate(value), |cell| cell.borrow().clone()) - }) - } - - /// Increment the count of references to this namespace. This is applied to the in-memory cache - /// only, [Self::flush] must be called to write the changes to the state. - pub fn count_ref(&mut self, storage: &dyn Storage, key: u128) -> StdResult { - self.ns_resolver - .resolve_cell_from_key(storage, key) - .and_then(|maybe_cell| { - maybe_cell.map_or_else( - || Err(StdError::not_found("Namespace")), - |cell| { - let mut ns = cell.borrow_mut(); - ns.counter += 1; - Ok(ns.clone()) - }, - ) - }) - } - - /// Decrement the count of references to this namespace, deleting it if not used anymore. - /// This is applied to the in-memory cache only, [Self::flush] must be called to write the changes - /// to the state. 
- pub fn free_ref(&mut self, storage: &dyn Storage, key: u128) -> StdResult { - self.ns_resolver - .resolve_cell_from_key(storage, key) - .and_then(|maybe_cell| { - let cell = match maybe_cell.filter(|c| c.borrow().counter > 0) { - Some(c) => c, - None => Err(StdError::generic_err( - "Trying to delete a non existing namespace", - ))?, - }; - - let mut ns = cell.borrow_mut(); - ns.counter -= 1; - if ns.counter == 0 { - self.ns_count_diff -= 1; - } - - let tmp = ns.clone(); - Ok(tmp) - }) - } - - /// Writes all the cached changes to the state, returning the namespace count diff. - pub fn flush(&mut self, storage: &mut dyn Storage) -> StdResult { - NAMESPACE_KEY_INCREMENT.save(storage, &self.ns_key_inc)?; - - for entry in &self.ns_resolver.by_val { - if entry.1.borrow().counter > 0 { - namespaces().save(storage, entry.0.to_string(), &entry.1.borrow().clone())?; - } else { - let res = namespaces().remove(storage, entry.0.to_string()); - match res { - Err(StdError::NotFound { .. }) => Ok(()), - _ => res, - }?; - } - } - - let count_diff = self.ns_count_diff; - self.ns_count_diff = 0; - self.ns_resolver.clear_cache(); - - Ok(count_diff) - } - - fn allocate(&mut self, value: String) -> Namespace { - let ns = Namespace { - value, - key: self.ns_key_inc, - counter: 0u128, - }; - - self.ns_key_inc += 1; - self.ns_count_diff += 1; - - self.ns_resolver.insert(ns).borrow().clone() - } -} diff --git a/contracts/axone-cognitarium/src/state/store.rs b/contracts/axone-cognitarium/src/state/store.rs deleted file mode 100644 index 83ece641..00000000 --- a/contracts/axone-cognitarium/src/state/store.rs +++ /dev/null @@ -1,90 +0,0 @@ -use crate::msg; -use crate::msg::StoreResponse; -use cosmwasm_std::{Addr, Uint128}; -use cw_storage_plus::Item; -use serde::{Deserialize, Serialize}; - -pub const STORE: Item = Item::new("store"); - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Store { - pub owner: Addr, - pub limits: StoreLimits, - pub stat: StoreStat, -} - 
-impl Store { - pub fn new(owner: Addr, limits: StoreLimits) -> Store { - Store { - owner, - limits, - stat: StoreStat::default(), - } - } -} - -impl From for StoreResponse { - fn from(value: Store) -> Self { - Self { - owner: value.owner.into(), - limits: value.limits.into(), - stat: value.stat.into(), - } - } -} - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct StoreLimits { - pub max_triple_count: Uint128, - pub max_byte_size: Uint128, - pub max_triple_byte_size: Uint128, - pub max_query_limit: u32, - pub max_query_variable_count: u32, - pub max_insert_data_byte_size: Uint128, - pub max_insert_data_triple_count: Uint128, -} - -impl From for StoreLimits { - fn from(value: msg::StoreLimitsInput) -> Self { - StoreLimits { - max_triple_count: value.max_triple_count, - max_byte_size: value.max_byte_size, - max_triple_byte_size: value.max_triple_byte_size, - max_query_limit: value.max_query_limit, - max_query_variable_count: value.max_query_variable_count, - max_insert_data_byte_size: value.max_insert_data_byte_size, - max_insert_data_triple_count: value.max_insert_data_triple_count, - } - } -} - -impl From for msg::StoreLimits { - fn from(value: StoreLimits) -> Self { - msg::StoreLimits { - max_triple_count: value.max_triple_count, - max_byte_size: value.max_byte_size, - max_triple_byte_size: value.max_triple_byte_size, - max_query_limit: value.max_query_limit, - max_query_variable_count: value.max_query_variable_count, - max_insert_data_byte_size: value.max_insert_data_byte_size, - max_insert_data_triple_count: value.max_insert_data_triple_count, - } - } -} - -#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] -pub struct StoreStat { - pub triple_count: Uint128, - pub namespace_count: Uint128, - pub byte_size: Uint128, -} - -impl From for msg::StoreStat { - fn from(value: StoreStat) -> Self { - Self { - triple_count: value.triple_count, - namespace_count: value.namespace_count, - byte_size: value.byte_size, - } - } -} 
diff --git a/contracts/axone-cognitarium/src/state/test_util.rs b/contracts/axone-cognitarium/src/state/test_util.rs deleted file mode 100644 index dbfc5818..00000000 --- a/contracts/axone-cognitarium/src/state/test_util.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::state::{Namespace, NamespaceSolver}; -use cosmwasm_std::{StdError, StdResult}; -use std::collections::BTreeMap; - -pub struct InMemoryNamespaceSolver { - by_val: BTreeMap, - by_key: BTreeMap, -} - -impl InMemoryNamespaceSolver { - pub fn with(namespaces: Vec<(u128, &str)>) -> Self { - let mut by_val = BTreeMap::new(); - let mut by_key = BTreeMap::new(); - for (key, value) in namespaces { - let ns = Namespace { - value: value.to_string(), - key, - counter: 1, - }; - by_val.insert(value.to_string(), ns.clone()); - by_key.insert(key, ns); - } - Self { by_val, by_key } - } -} - -impl NamespaceSolver for InMemoryNamespaceSolver { - fn resolve_from_key(&mut self, key: u128) -> StdResult { - self.by_key - .get(&key) - .ok_or_else(|| StdError::not_found("Namespace")) - .cloned() - } - - fn resolve_from_val(&mut self, _value: String) -> StdResult { - self.by_val - .get(&_value) - .ok_or_else(|| StdError::not_found("Namespace")) - .cloned() - } -} diff --git a/contracts/axone-cognitarium/src/state/triples.rs b/contracts/axone-cognitarium/src/state/triples.rs deleted file mode 100644 index 844670eb..00000000 --- a/contracts/axone-cognitarium/src/state/triples.rs +++ /dev/null @@ -1,282 +0,0 @@ -use crate::state::NamespaceSolver; -use blake3::Hash; -use cosmwasm_std::StdResult; -use cw_storage_plus::{Index, IndexList, IndexedMap, MultiIndex}; -use serde::{Deserialize, Serialize}; - -/// Represents a triple primary key as a tuple of: -/// - Object hash -/// - Predicate in a binary format -/// - Subject in a binary format -pub type TriplePK<'a> = (&'a [u8], Vec, Vec); - -pub struct TripleIndexes<'a> { - pub subject_and_predicate: MultiIndex<'a, (Vec, Vec), Triple, TriplePK<'a>>, -} - -impl IndexList for 
TripleIndexes<'_> { - fn get_indexes(&self) -> Box> + '_> { - let subject_and_predicate: &dyn Index = &self.subject_and_predicate; - Box::new(vec![subject_and_predicate].into_iter()) - } -} - -pub fn triples<'a>() -> IndexedMap, Triple, TripleIndexes<'a>> { - IndexedMap::new( - "TRIPLE", - TripleIndexes { - subject_and_predicate: MultiIndex::new( - |_pk, triple| (triple.subject.key(), triple.predicate.key()), - "TRIPLE", - "TRIPLE__SUBJECT_PREDICATE", - ), - }, - ) -} - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Triple { - pub subject: Subject, - pub predicate: Predicate, - pub object: Object, -} - -impl Triple { - pub fn namespaces(&self) -> Vec { - let mut namespaces = Vec::with_capacity(3); - if let Subject::Named(n) = &self.subject { - namespaces.push(n.namespace); - } - - namespaces.push(self.predicate.namespace); - - match &self.object { - Object::Named(n) => namespaces.push(n.namespace), - Object::Literal(Literal::Typed { datatype, .. }) => namespaces.push(datatype.namespace), - _ => {} - } - - namespaces - } -} - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub enum Subject { - Named(Node), - Blank(BlankNode), -} - -impl Subject { - pub fn key(&self) -> Vec { - match self { - Subject::Named(n) => { - let node = n.key(); - let mut key: Vec = Vec::with_capacity(node.len() + 1); - key.push(b'n'); - key.extend(node); - - key - } - Subject::Blank(n) => { - let val = n.to_be_bytes(); - let mut key: Vec = Vec::with_capacity(val.len() + 1); - key.push(b'b'); - key.extend(val); - - key - } - } - } -} - -pub type Predicate = Node; - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub enum Object { - Named(Node), - Blank(BlankNode), - Literal(Literal), -} - -impl Object { - pub fn as_hash(&self) -> Hash { - let mut hasher = blake3::Hasher::new(); - match self { - Object::Named(n) => { - hasher - .update(b"n") - .update(n.namespace.to_be_bytes().as_slice()) - .update(n.value.as_bytes()); - } - 
Object::Blank(n) => { - hasher.update(b"b").update(n.to_be_bytes().as_slice()); - } - Object::Literal(l) => { - hasher.update(b"l"); - match l { - Literal::Simple { value } => hasher.update(b"s").update(value.as_bytes()), - Literal::I18NString { value, language } => hasher - .update(b"i") - .update(value.as_bytes()) - .update(language.as_bytes()), - Literal::Typed { value, datatype } => hasher - .update(b"t") - .update(value.as_bytes()) - .update(datatype.namespace.to_be_bytes().as_slice()) - .update(datatype.value.as_bytes()), - }; - } - } - - hasher.finalize() - } -} - -pub const BLANK_NODE_SIZE: usize = 16usize; -pub type BlankNode = u128; - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Node { - pub namespace: u128, - pub value: String, -} - -impl Node { - pub fn key(&self) -> Vec { - let val = self.value.as_bytes(); - let mut key: Vec = Vec::with_capacity(val.len() + 16); - key.extend(self.namespace.to_be_bytes()); - key.extend(val); - - key - } - - pub fn as_iri(&self, ns_solver: &mut dyn NamespaceSolver) -> StdResult { - Ok(ns_solver.resolve_from_key(self.namespace)?.value + &self.value) - } -} - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub enum Literal { - Simple { value: String }, - I18NString { value: String, language: String }, - Typed { value: String, datatype: Node }, -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn object_hash() { - let cases = vec![ - ( - Object::Named(Node { - namespace: 0, - value: "val1".to_string(), - }), - Object::Named(Node { - namespace: 0, - value: "val2".to_string(), - }), - ), - ( - Object::Named(Node { - namespace: 1, - value: "val".to_string(), - }), - Object::Named(Node { - namespace: 2, - value: "val".to_string(), - }), - ), - (Object::Blank(0u128), Object::Blank(1u128)), - ( - Object::Literal(Literal::Simple { - value: "val1".to_string(), - }), - Object::Literal(Literal::Simple { - value: "val2".to_string(), - }), - ), - ( - 
Object::Literal(Literal::I18NString { - language: "fr".to_string(), - value: "val1".to_string(), - }), - Object::Literal(Literal::I18NString { - language: "fr".to_string(), - value: "val2".to_string(), - }), - ), - ( - Object::Literal(Literal::I18NString { - language: "fr".to_string(), - value: "val".to_string(), - }), - Object::Literal(Literal::I18NString { - language: "en".to_string(), - value: "val".to_string(), - }), - ), - ( - Object::Literal(Literal::Typed { - datatype: Node { - namespace: 0, - value: "n".to_string(), - }, - value: "val1".to_string(), - }), - Object::Literal(Literal::Typed { - datatype: Node { - namespace: 0, - value: "n".to_string(), - }, - value: "val2".to_string(), - }), - ), - ( - Object::Literal(Literal::Typed { - datatype: Node { - namespace: 0, - value: "n1".to_string(), - }, - value: "val".to_string(), - }), - Object::Literal(Literal::Typed { - datatype: Node { - namespace: 0, - value: "n2".to_string(), - }, - value: "val".to_string(), - }), - ), - ( - Object::Literal(Literal::Typed { - datatype: Node { - namespace: 1, - value: "n".to_string(), - }, - value: "val".to_string(), - }), - Object::Literal(Literal::Typed { - datatype: Node { - namespace: 2, - value: "n".to_string(), - }, - value: "val".to_string(), - }), - ), - ( - Object::Blank(0u128), - Object::Literal(Literal::Simple { - value: "val".to_string(), - }), - ), - ]; - - for case in cases { - assert_ne!(case.0.as_hash(), case.1.as_hash()) - } - } -} diff --git a/contracts/axone-cognitarium/src/storer/engine.rs b/contracts/axone-cognitarium/src/storer/engine.rs deleted file mode 100644 index fa165416..00000000 --- a/contracts/axone-cognitarium/src/storer/engine.rs +++ /dev/null @@ -1,300 +0,0 @@ -use crate::error::StoreError; -use crate::state::{ - triples, Literal, NamespaceBatchService, NamespaceQuerier, Node, Object, Store, Subject, - Triple, BLANK_NODE_IDENTIFIER_COUNTER, BLANK_NODE_SIZE, STORE, -}; -use crate::ContractError; -use axone_rdf::normalize::IdentifierIssuer; 
-use axone_rdf::serde::TripleReader; -use axone_rdf::uri::explode_iri; -use cosmwasm_std::{StdError, StdResult, Storage, Uint128}; -use rio_api::model; -use rio_api::model::Term; -use std::io::BufRead; -use std::ops::Neg; - -pub struct StoreEngine<'a> { - storage: &'a mut dyn Storage, - store: Store, - ns_batch_svc: NamespaceBatchService, - blank_node_id_issuer: IdentifierIssuer, - initial_triple_count: Uint128, - initial_byte_size: Uint128, -} - -impl<'a> StoreEngine<'a> { - pub fn new(storage: &'a mut dyn Storage) -> StdResult { - let store = STORE.load(storage)?; - let blank_node_id_counter = BLANK_NODE_IDENTIFIER_COUNTER.load(storage)?; - let ns_batch_svc = NamespaceBatchService::new(storage)?; - Ok(Self { - storage, - store: store.clone(), - ns_batch_svc, - blank_node_id_issuer: IdentifierIssuer::new("", blank_node_id_counter), - initial_triple_count: store.stat.triple_count, - initial_byte_size: store.stat.byte_size, - }) - } - - pub fn store_all( - &mut self, - reader: &mut TripleReader, - ) -> Result { - reader.read_all(|t| self.store_triple(t))?; - self.finish() - } - - fn store_triple(&mut self, t: model::Triple<'_>) -> Result<(), ContractError> { - self.store.stat.triple_count += Uint128::one(); - if self.store.stat.triple_count > self.store.limits.max_triple_count { - Err(StoreError::TripleCount(self.store.limits.max_triple_count))?; - } - if self.store.stat.triple_count - self.initial_triple_count - > self.store.limits.max_insert_data_triple_count - { - Err(StoreError::InsertDataTripleCount( - self.store.limits.max_insert_data_triple_count, - ))?; - } - - let triple = Self::rio_to_triple( - t, - &mut |ns_str| { - self.ns_batch_svc - .resolve_or_allocate(self.storage, ns_str) - .map(|ns| ns.key) - }, - &mut self.blank_node_id_issuer, - )?; - let t_size = Uint128::from(self.triple_size(&triple).map_err(ContractError::Std)? 
as u128); - if t_size > self.store.limits.max_triple_byte_size { - Err(StoreError::TripleByteSize( - t_size, - self.store.limits.max_triple_byte_size, - ))?; - } - - self.store.stat.byte_size += t_size; - if self.store.stat.byte_size > self.store.limits.max_byte_size { - Err(StoreError::ByteSize(self.store.limits.max_byte_size))?; - } - if self.store.stat.byte_size - self.initial_byte_size - > self.store.limits.max_insert_data_byte_size - { - Err(StoreError::InsertDataByteSize( - self.store.limits.max_insert_data_byte_size, - ))?; - } - - let mut new_ns_refs = Vec::new(); - triples() - .update( - self.storage, - ( - triple.object.as_hash().as_bytes(), - triple.predicate.key(), - triple.subject.key(), - ), - |maybe_triple| { - if let Some(t) = maybe_triple { - self.store.stat.triple_count -= Uint128::one(); - self.store.stat.byte_size -= t_size; - Ok(t) - } else { - new_ns_refs.append(&mut triple.namespaces()); - Ok(triple) - } - }, - ) - .map_err(ContractError::Std)?; - - for ns_key in new_ns_refs { - self.ns_batch_svc.count_ref(self.storage, ns_key)?; - } - Ok(()) - } - - pub fn delete_all(&mut self, triples: &[Triple]) -> Result { - for triple in triples { - self.delete_triple(triple)?; - } - self.finish() - } - - fn delete_triple(&mut self, triple: &Triple) -> Result<(), ContractError> { - let old = triples() - .may_load( - self.storage, - ( - triple.object.as_hash().as_bytes(), - triple.predicate.key(), - triple.subject.key(), - ), - ) - .map_err(ContractError::Std)?; - - if old.is_some() { - triples().replace( - self.storage, - ( - triple.object.as_hash().as_bytes(), - triple.predicate.key(), - triple.subject.key(), - ), - None, - old.as_ref(), - )?; - self.store.stat.triple_count -= Uint128::one(); - let triple_size = self.triple_size(triple).map_err(ContractError::Std)?; - self.store.stat.byte_size -= Uint128::from(triple_size as u128); - - for ns_key in triple.namespaces() { - self.ns_batch_svc - .free_ref(self.storage, ns_key) - 
.map_err(ContractError::Std)?; - } - } - Ok(()) - } - - /// Flushes the store to the storage. - /// Returns the number of triples added or removed (absolute value). - fn finish(&mut self) -> Result { - let ns_diff = self.ns_batch_svc.flush(self.storage)?; - if ns_diff > 0 { - self.store.stat.namespace_count += Uint128::new(ns_diff as u128); - } else { - self.store.stat.namespace_count -= Uint128::new(ns_diff.neg() as u128); - } - - BLANK_NODE_IDENTIFIER_COUNTER.save(self.storage, &self.blank_node_id_issuer.counter)?; - - STORE.save(self.storage, &self.store)?; - - let count_diff = self - .store - .stat - .triple_count - .abs_diff(self.initial_triple_count); - - self.initial_triple_count = self.store.stat.triple_count; - self.initial_byte_size = self.store.stat.byte_size; - - Ok(count_diff) - } - - fn rio_to_triple( - triple: model::Triple<'_>, - ns_fn: &mut F, - id_issuer: &mut IdentifierIssuer, - ) -> StdResult - where - F: FnMut(String) -> StdResult, - { - Ok(Triple { - subject: Self::rio_to_subject(triple.subject, ns_fn, id_issuer)?, - predicate: Self::rio_to_node(triple.predicate, ns_fn)?, - object: Self::rio_to_object(triple.object, ns_fn, id_issuer)?, - }) - } - - fn rio_to_subject( - subject: model::Subject<'_>, - ns_fn: &mut F, - id_issuer: &mut IdentifierIssuer, - ) -> StdResult - where - F: FnMut(String) -> StdResult, - { - match subject { - model::Subject::NamedNode(node) => Self::rio_to_node(node, ns_fn).map(Subject::Named), - model::Subject::BlankNode(node) => Ok(Subject::Blank( - id_issuer.get_n_or_issue(node.id.to_string()), - )), - model::Subject::Triple(_) => Err(StdError::generic_err("RDF star syntax unsupported")), - } - } - - fn rio_to_node(node: model::NamedNode<'_>, ns_fn: &mut F) -> StdResult - where - F: FnMut(String) -> StdResult, - { - let (ns, v) = explode_iri(node.iri)?; - Ok(Node { - namespace: ns_fn(ns)?, - value: v, - }) - } - - fn rio_to_object( - object: Term<'_>, - ns_fn: &mut F, - id_issuer: &mut IdentifierIssuer, - ) -> StdResult 
- where - F: FnMut(String) -> StdResult, - { - match object { - Term::BlankNode(node) => { - Ok(Object::Blank(id_issuer.get_n_or_issue(node.id.to_string()))) - } - Term::NamedNode(node) => Self::rio_to_node(node, ns_fn).map(Object::Named), - Term::Literal(literal) => Self::rio_to_literal(literal, ns_fn).map(Object::Literal), - Term::Triple(_) => Err(StdError::generic_err("RDF star syntax unsupported")), - } - } - - fn rio_to_literal(literal: model::Literal<'_>, ns_fn: &mut F) -> StdResult - where - F: FnMut(String) -> StdResult, - { - match literal { - model::Literal::Simple { value } => Ok(Literal::Simple { - value: value.to_string(), - }), - model::Literal::LanguageTaggedString { value, language } => Ok(Literal::I18NString { - value: value.to_string(), - language: language.to_string(), - }), - model::Literal::Typed { value, datatype } => { - Self::rio_to_node(datatype, ns_fn).map(|node| Literal::Typed { - value: value.to_string(), - datatype: node, - }) - } - } - } - - fn triple_size(&mut self, triple: &Triple) -> StdResult { - Ok(self.subject_size(&triple.subject)? - + self.node_size(&triple.predicate)? - + self.object_size(&triple.object)?) 
- } - - fn subject_size(&mut self, subject: &Subject) -> StdResult { - match subject { - Subject::Named(n) => self.node_size(n), - Subject::Blank(_) => Ok(BLANK_NODE_SIZE), - } - } - - fn node_size(&mut self, node: &Node) -> StdResult { - self.ns_batch_svc - .resolve_from_key(self.storage, node.namespace) - .and_then(NamespaceQuerier::none_as_error_middleware) - .map(|ns| ns.value.len() + node.value.len()) - } - - fn object_size(&mut self, object: &Object) -> StdResult { - Ok(match object { - Object::Blank(_) => BLANK_NODE_SIZE, - Object::Named(n) => self.node_size(n)?, - Object::Literal(l) => match l { - Literal::Simple { value } => value.len(), - Literal::I18NString { value, language } => value.len() + language.len(), - Literal::Typed { value, datatype } => value.len() + self.node_size(datatype)?, - }, - }) - } -} diff --git a/contracts/axone-cognitarium/src/storer/mod.rs b/contracts/axone-cognitarium/src/storer/mod.rs deleted file mode 100644 index 520992db..00000000 --- a/contracts/axone-cognitarium/src/storer/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod engine; - -pub use engine::*; diff --git a/contracts/axone-cognitarium/testdata/blank-nodes.ttl b/contracts/axone-cognitarium/testdata/blank-nodes.ttl deleted file mode 100644 index 6d2aaada..00000000 --- a/contracts/axone-cognitarium/testdata/blank-nodes.ttl +++ /dev/null @@ -1,37 +0,0 @@ -@prefix area: . -@prefix core: . -@prefix dataset: . -@prefix license: . -@prefix mediatype: . -@prefix owl: . -@prefix topic: . -@prefix xsd: . - - a owl:NamedIndividual, - ; - core:describes dataset:7ff3d2a4-e6b2-4b06-8619-4fc8740dad86 ; - core:hasCreator "IGN" ; - core:hasDescription "ADMIN EXPRESS permet d'effectuer des croisements avec d'autres sources de données dans le but de construire des représentations thématiques du territoire selon une granularité administrative (commune, arrondissement départementaux, département, région). 
ADMIN EXPRESS est décliné dans une édition \"COG\", conforme au code officiel géographique publié chaque année par l'INSEE."@fr ; - core:hasDescription "ADMIN EXPRESS allows cross-referencing with other data sources in order to build thematic representations of the territory according to an administrative granularity (commune, departmental district, department, region). ADMIN EXPRESS is available in a \"COG\" edition, in accordance with the official geographic code published each year by INSEE."@en ; - core:hasDescription "ADMIN EXPRESS ermöglicht es, Kreuzungen mit anderen Datenquellen vorzunehmen, um thematische Darstellungen des Gebiets nach administrativer Granularität (Gemeinde, Departementsbezirke, Departement, Region) zu erstellen. ADMIN EXPRESS ist in einer \"COG\"-Ausgabe erhältlich, die dem offiziellen geografischen Code entspricht, der jedes Jahr vom INSEE veröffentlicht wird."@de ; - core:hasFormat mediatype:application_vnd-shp ; - core:hasImage ; - core:hasLicense license:LO-FR-2_0 ; - core:hasPublisher "AXONE" ; - core:hasSpatialCoverage area:250 ; - core:hasTag "INSEE", - "commune", - "france", - "open data", - "territoire" ; - core:hasTemporalCoverage [ - a owl:NamedIndividual, core:Period ; - core:hasStartDate "2022-01-01T00:00:00+00:00"^^xsd:dateTime - ] ; - core:hasInformations [ - core:hasInformation "this is a dataset" - ] ; - core:hasTitle "ADMIN EXPRESS COG 2022 COMMUNE"@fr ; - core:hasTitle "ADMIN EXPRESS COG 2022 CITY"@en ; - core:hasTitle "ADMIN EXPRESS COG 2022 GEMEINDE"@de ; - core:hasTopic topic:Other . diff --git a/contracts/axone-cognitarium/testdata/sample.nq b/contracts/axone-cognitarium/testdata/sample.nq deleted file mode 100644 index 5a927ba5..00000000 --- a/contracts/axone-cognitarium/testdata/sample.nq +++ /dev/null @@ -1,40 +0,0 @@ - . - . - . - . - . - . - "A test Data Space."@en . - "Un Data Space de test."@fr . - "AXONE" . - "AXONE" . - "Test" . - . - "Data Space de test"@fr . - "Test Data Space"@en . - . - . - . - . - . 
- . - . - . - "AXONE" . - . - . - "test" . - "Dataset de test"@fr . - "test Dataset"@en . - "Me" . - . - "A test Dataset."@en . - "Un Dataset de test."@fr . - . - . - . - . - . - "2023-03-28T00:00:00+00:00"^^ . - "2023-03-28T00:00:00+00:00"^^ . - . diff --git a/contracts/axone-cognitarium/testdata/sample.nt b/contracts/axone-cognitarium/testdata/sample.nt deleted file mode 100644 index b681e782..00000000 --- a/contracts/axone-cognitarium/testdata/sample.nt +++ /dev/null @@ -1,40 +0,0 @@ - . - . - . - . - . - . - "A test Data Space."@en . - "Un Data Space de test."@fr . - "AXONE" . - "AXONE" . - "Test" . - . - "Data Space de test"@fr . - "Test Data Space"@en . - . - . - . - . - . - . - . - . - "AXONE" . - . - . - "test" . - "Dataset de test"@fr . - "test Dataset"@en . - "Me" . - . - "A test Dataset."@en . - "Un Dataset de test."@fr . - . - . - . - . - . - "2023-03-28T00:00:00+00:00"^^ . - "2023-03-28T00:00:00+00:00"^^ . - . diff --git a/contracts/axone-cognitarium/testdata/sample.rdf.xml b/contracts/axone-cognitarium/testdata/sample.rdf.xml deleted file mode 100644 index e3c8599a..00000000 --- a/contracts/axone-cognitarium/testdata/sample.rdf.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - A test Data Space. - Un Data Space de test. - AXONE - - AXONE - Test - - Data Space de test - Test Data Space - - - - - - - - - - - - AXONE - - - - test - Dataset de test - test Dataset - Me - - A test Dataset. - Un Dataset de test. - - - - - - - 2023-03-28T00:00:00+00:00 - - 2023-03-28T00:00:00+00:00 - - - - diff --git a/contracts/axone-cognitarium/testdata/sample.ttl b/contracts/axone-cognitarium/testdata/sample.ttl deleted file mode 100644 index 30a17cd1..00000000 --- a/contracts/axone-cognitarium/testdata/sample.ttl +++ /dev/null @@ -1,43 +0,0 @@ -@prefix owl: . -@prefix ns0: . -@prefix xsd: . - - - a owl:NamedIndividual, ; - ns0:hasRegistrar . 
- - - a owl:NamedIndividual, ; - ns0:describes ; - ns0:hasDescription "A test Data Space."@en, "Un Data Space de test."@fr ; - ns0:hasPublisher "AXONE" ; - ns0:hasTag "AXONE", "Test" ; - ns0:hasTopic ; - ns0:hasTitle "Data Space de test"@fr, "Test Data Space"@en . - - - a owl:NamedIndividual, ns0:Dataset ; - ns0:hasIdentifier ; - ns0:providedBy ; - ns0:belongsTo ; - ns0:hasRegistrar . - - - a owl:NamedIndividual, ; - ns0:hasPublisher "AXONE" ; - ns0:hasLicense ; - ns0:hasFormat ; - ns0:hasTag "test" ; - ns0:hasTitle "Dataset de test"@fr, "test Dataset"@en ; - ns0:hasCreator "Me" ; - ns0:describes ; - ns0:hasDescription "A test Dataset."@en, "Un Dataset de test."@fr ; - ns0:hasTopic . - - - a owl:NamedIndividual, ; - ns0:createdBy ; - ns0:lastModifiedBy ; - ns0:updatedOn "2023-03-28T00:00:00+00:00"^^xsd:dateTime ; - ns0:createdOn "2023-03-28T00:00:00+00:00"^^xsd:dateTime ; - ns0:describes . diff --git a/contracts/axone-cognitarium/tests/e2e/features/insert.feature b/contracts/axone-cognitarium/tests/e2e/features/insert.feature deleted file mode 100644 index 8192c05c..00000000 --- a/contracts/axone-cognitarium/tests/e2e/features/insert.feature +++ /dev/null @@ -1,32 +0,0 @@ -Feature: Cognitarium insertion - - Scenario: Inserting some rdf data - This scenario demonstrates inserting some rdf data into the Cognitarium smart contract. - - Given a smart contract instantiated with message: - """yaml - limits: - max_triple_count: '10000' - max_byte_size: '2000000' - max_triple_byte_size: '300' - max_query_limit: 4 - max_query_variable_count: 5 - max_insert_data_byte_size: '10000' - max_insert_data_triple_count: '100' - """ - When the smart contract is called with the following execute message: - """yaml - !insert_data - format: turtle - data: | - @prefix ex: . - @prefix xsd: . - - ex:Alice a ; - ex:hasAge "30"^^xsd:integer ; - ex:hasEmail "alice@example.com" . 
- """ - Then response is successful - Then response attributes should be: - | action | insert | - | triple_count | 3 | diff --git a/contracts/axone-cognitarium/tests/e2e/main.rs b/contracts/axone-cognitarium/tests/e2e/main.rs deleted file mode 100644 index f4bd02f3..00000000 --- a/contracts/axone-cognitarium/tests/e2e/main.rs +++ /dev/null @@ -1,198 +0,0 @@ -use axone_cognitarium::contract::{execute, instantiate}; -use axone_cognitarium::ContractError; -use base64::engine::general_purpose; -use base64::Engine; -use cosmwasm_std::testing::{ - message_info, mock_dependencies, mock_env, MockApi, MockQuerier, MockStorage, -}; -use cosmwasm_std::{MessageInfo, OwnedDeps, Response}; -use cucumber::parser::{Basic, Error}; -use cucumber::{gherkin, given, then, when, World}; -use futures::{stream, TryStreamExt}; -use serde_yaml::Value; -use std::fmt::Debug; -use std::path::Path; -use std::vec; -use testing::addr::addr; - -#[derive(World)] -#[world(init = Self::new)] -pub struct SmartContractWorld { - deps: OwnedDeps, - info: MessageInfo, - response: Result, -} - -impl SmartContractWorld { - fn new() -> Self { - SmartContractWorld { - deps: mock_dependencies(), - info: message_info(&addr("owner"), &[]), - response: Ok(Response::new()), - } - } -} - -impl Debug for SmartContractWorld { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("SmartContractWorld") - .field("info", &self.info) - .field("response", &self.response) - .finish() - } -} - -struct CustomParser; - -impl> cucumber::Parser for CustomParser { - type Cli = >::Cli; - type Output = stream::MapOk< - stream::Iter>>, - fn(gherkin::Feature) -> gherkin::Feature, - >; - - fn parse(self, input: I, cli: Self::Cli) -> Self::Output { - Basic::new().parse(input, cli).map_ok(|mut feature| { - feature.scenarios = feature - .scenarios - .into_iter() - .map(|mut scenario| { - scenario.steps = scenario - .steps - .into_iter() - .map(|mut step| { - Self::convert_step_data_to_base64(&mut step); 
- step - }) - .collect(); - scenario - }) - .collect(); - feature - }) - } -} - -impl CustomParser { - /// Convert the data in the step to base64 if it is a yaml docstring with a 'data' field - /// that is a string. This allows to keep the data in human-readable format in the feature rather - /// than base64. - fn convert_step_data_to_base64(step: &mut gherkin::Step) { - if let Some(docstring) = &step.docstring { - if let ("yaml", content) = extract_type_and_content(docstring) { - let mut value: Value = serde_yaml::from_str(content).unwrap(); - - if let Value::Tagged(ref mut tag) = value { - if let Some(data_value) = tag.value.get_mut("data") { - if let Value::String(data) = data_value { - *data = general_purpose::STANDARD.encode(&data); - } - } - } - step.docstring = Some(format!("yaml\n{}", serde_yaml::to_string(&value).unwrap())); - } - } - } -} - -#[given(regex = r"^a smart contract instantiated with message:$")] -fn smart_contract_instantiated_with_message(world: &mut SmartContractWorld, step: &gherkin::Step) { - match &step.docstring { - Some(docstring) => { - let content = extract_yaml_content(docstring).unwrap(); - let instantiate_msg = serde_yaml::from_str(content).unwrap(); - - instantiate( - world.deps.as_mut(), - mock_env(), - world.info.clone(), - instantiate_msg, - ) - .unwrap(); - return; - } - _ => panic!("No message provided"), - } -} - -#[when(regex = r"^the smart contract is called with the following execute message:$")] -fn the_smart_contract_is_called_with_the_following_execute_message( - world: &mut SmartContractWorld, - step: &gherkin::Step, -) { - match &step.docstring { - Some(docstring) => { - let content = extract_yaml_content(docstring).unwrap(); - let insert_data_msg = serde_yaml::from_str(content).unwrap(); - - world.response = execute( - world.deps.as_mut(), - mock_env(), - world.info.clone(), - insert_data_msg, - ); - } - None => panic!("No message provided"), - }; -} - -#[then(regex = r"^response is (successful|error)$")] 
-#[allow(unused_variables)] -fn response_is_successful_or_error( - world: &mut SmartContractWorld, - step: &gherkin::Step, - status: String, -) { - match status.as_str() { - "successful" => assert!(world.response.is_ok()), - "error" => assert!(world.response.is_err()), - _ => unreachable!(), - } -} - -#[then(regex = r"^response attributes should be:$")] -fn response_attributes_should_be(world: &mut SmartContractWorld, step: &gherkin::Step) { - if let Some(table) = &step.table { - let response = world.response.as_ref().unwrap(); - - for row in &table.rows { - let key = &row[0]; - let value = &row[1]; - - if let Some(attr) = response.attributes.iter().find(|&attr| attr.key == *key) { - assert_eq!( - attr.value, *value, - "Expected attribute '{}' to have value '{}', but found '{}'", - key, value, attr.value - ); - } else { - panic!( - "Expected attribute '{}' with value '{}' was not found", - key, value - ); - } - } - } else { - panic!("No attributes provided"); - } -} - -fn extract_type_and_content(docstring: &str) -> (&str, &str) { - let (doctype, content) = docstring.split_once('\n').unwrap_or((docstring, "")); - (doctype, content) -} - -fn extract_yaml_content(docstring: &str) -> Result<&str, &str> { - match extract_type_and_content(docstring) { - ("yaml", content) => Ok(content), - _ => Err("only yaml docstrings are supported"), - } -} - -fn main() { - futures::executor::block_on( - SmartContractWorld::cucumber::<&str>() - .with_parser(CustomParser) - .run("tests/e2e/features"), - ); -} diff --git a/contracts/axone-dataverse/Cargo.toml b/contracts/axone-dataverse/Cargo.toml deleted file mode 100644 index 00e7bfa1..00000000 --- a/contracts/axone-dataverse/Cargo.toml +++ /dev/null @@ -1,50 +0,0 @@ -[package] -authors = { workspace = true } -description = "The Smart Contract overseeing and managing the Dataverse in the AXONE ecosystem." 
-edition = { workspace = true } -homepage = { workspace = true } -keywords = { workspace = true } -license = { workspace = true } -name = "axone-dataverse" -repository = { workspace = true } -rust-version = { workspace = true } -version = { workspace = true } - -exclude = [ - # Those files are rust-optimizer artifacts. You might want to commit them for convenience but they should not be part of the source code publication. - "contract.wasm", - "hash.txt", -] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[lib] -crate-type = ["cdylib", "rlib"] - -[dependencies] -axone-cognitarium.workspace = true -axone-cognitarium-client.workspace = true -axone-rdf.workspace = true -base64 = "0.22.1" -bech32 = "0.11.0" -cosmwasm-schema.workspace = true -cosmwasm-std.workspace = true -cw-storage-plus.workspace = true -cw-utils.workspace = true -cw2.workspace = true -itertools = "0.14.0" -multibase = "0.9.2" -rio_api.workspace = true -ripemd = "0.1.3" -serde.workspace = true -sha2 = "0.10.9" -thiserror.workspace = true -unsigned-varint = "0.8.0" - -[dev-dependencies] -base64 = "0.22.1" -testing.workspace = true - -[features] -# use library feature to disable all instantiate/execute/query exports -library = [] diff --git a/contracts/axone-dataverse/Makefile.toml b/contracts/axone-dataverse/Makefile.toml deleted file mode 100644 index 944012e8..00000000 --- a/contracts/axone-dataverse/Makefile.toml +++ /dev/null @@ -1,12 +0,0 @@ -[tasks.generate_schema] -args = ["run", "--bin", "schema"] -command = "cargo" - -[tasks.schema] -dependencies = ["generate_schema"] -script = ''' -SCHEMA=$(find schema -type f -maxdepth 1 -name '*.json' -print0) -TITLE=$(jq -r .contract_name $SCHEMA) -jq --arg description "$(cat README.md)" '. + {description: $description}' $SCHEMA > $SCHEMA.tmp && mv $SCHEMA.tmp $SCHEMA -jq --arg title $TITLE '. 
+ {title: $title}' $SCHEMA > $SCHEMA.tmp && mv $SCHEMA.tmp $SCHEMA -''' diff --git a/contracts/axone-dataverse/README.md b/contracts/axone-dataverse/README.md deleted file mode 100644 index 779b0ffe..00000000 --- a/contracts/axone-dataverse/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# Dataverse - -## Overview - -The `dataverse` smart contract is responsible for overseeing and managing the Dataverse. - -## Dataverse - -The Dataverse is an ever-expanding universe that encompasses a wide range of digital resources. These include datasets, data processing algorithms, ML algorithm, storage resources, computational resources, identity management solutions, orchestration engines, oracles, and many other resources recorded on the blockchain. - -When the smart contract is instantiated, it creates a Dataverse instance. This instance is separated and isolated from any pre-existing ones, and as many dataverse instances as required can be created. - -## Zones - -Zones within the Dataverse represent distinct areas or domains where specific governance rules and policies are applied. These Zones are conceptual frameworks created to manage and organize resources under a unified set of regulations and permissions. - -Each Zone is defined by its unique identity and set of governing rules, which dictate how resources within it can be accessed, used, and shared. This approach allows for granular control over different segments of the Dataverse, catering to various requirements and use cases. By managing these Zones, the dataverse smart contract ensures that resources are utilized in compliance with the defined policies and consents, thereby maintaining order and integrity within the Dataverse. - -## Resources - -In the context of the Dataverse, Resources refer to a broad category of digital entities, which include Services and Digital Resources. 
- -- **Digital Resources**: This category extends to various digital entities such as datasets, algorithms, machine learning models, and other digital assets. Like Services, Digital Resources are identified by a URI in conjunction with the Service responsible for their provision. - -- **Services**: These are network-accessible functionalities like REST APIs, gRPC services, and other similar offerings. Each Service in the Dataverse is uniquely identified by its Uniform Resource Identifier (URI) and is associated with a specific Registrar responsible for its registration and management. - -## Decentralized Identifiers (DID) - -Decentralized Identifiers (DID) are a foundational element in the Dataverse, serving as unique, persistent, and globally resolvable identifiers that are fully under the control of the DID subject, which could be an individual, organization, or a any kind of resource (dataset, -algorithm, nft, ML algorithm). - -DIDs play a crucial role in the Dataverse by facilitating a trustable and interoperable identity mechanism. They enable the establishment of a verifiable and self-sovereign identity for resources, services, and entities within the ecosystem. - -## Claims - -Claims in the Dataverse context are assertions or statements made about a Resource identified by a DID. - -Claims play a pivotal role in the governance framework of the Dataverse. By leveraging knowledge derived from verifiable credentials, the governances established by Zones can evaluate the fulfillment of specific rules and compliance. This evaluation is critical in ensuring that the resources within the Dataverse adhere to the established norms, policies, and requirements. - -Claims are submitted in the form of [Verifiable Presentations (VPs)](https://www.w3.org/TR/vc-data-model/#presentations), which are aggregations of one or more [Verifiable Credentials (VCs)](https://www.w3.org/TR/vc-data-model/#what-is-a-verifiable-credential). 
- -## Dependencies - -Given its role and status, this smart contract serves as the primary access point for the AXONE protocol to manage all on-chain stored resources. To fulfill its tasks, the smart contract relies on other smart contracts within the AXONE ecosystem. Notably, it uses the `Cognitarium` smart contract for persisting the Dataverse representation in an ontological form and the `Law Stone` smart contract to establish governance rules. diff --git a/contracts/axone-dataverse/src/contract.rs b/contracts/axone-dataverse/src/contract.rs deleted file mode 100644 index 62f732de..00000000 --- a/contracts/axone-dataverse/src/contract.rs +++ /dev/null @@ -1,620 +0,0 @@ -#[cfg(not(feature = "library"))] -use cosmwasm_std::entry_point; -use cosmwasm_std::{ - instantiate2_address, to_json_binary, Binary, CodeInfoResponse, Deps, DepsMut, Env, - MessageInfo, Response, StdError, StdResult, WasmMsg, -}; -use cw2::set_contract_version; -use cw_utils::nonpayable; - -use crate::error::ContractError; -use crate::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; -use crate::state::{Dataverse, DATAVERSE}; - -// version info for migration info -const CONTRACT_NAME: &str = concat!("crates.io:", env!("CARGO_PKG_NAME")); -const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION"); - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn instantiate( - deps: DepsMut<'_>, - env: Env, - info: MessageInfo, - msg: InstantiateMsg, -) -> Result { - nonpayable(&info)?; - set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?; - - let creator = deps.api.addr_canonicalize(env.contract.address.as_str())?; - let CodeInfoResponse { checksum, .. 
} = deps - .querier - .query_wasm_code_info(msg.triplestore_config.code_id.u64())?; - let salt = Binary::from(msg.name.as_bytes()); - - let _triplestore_address = instantiate2_address(checksum.as_slice(), &creator, &salt)?; - - // Necessary stuff for testing purposes, see: https://github.com/CosmWasm/cosmwasm/issues/1648 - let triplestore_address = { - #[cfg(not(test))] - { - deps.api.addr_humanize(&_triplestore_address)? - } - #[cfg(test)] - cosmwasm_std::Addr::unchecked("predicted address") - }; - - DATAVERSE.save( - deps.storage, - &Dataverse { - name: msg.name.clone(), - triplestore_address: triplestore_address.clone(), - }, - )?; - - Ok(Response::new() - .add_attribute("triplestore_address", triplestore_address.to_string()) - .add_message(WasmMsg::Instantiate2 { - admin: Some(env.contract.address.to_string()), - code_id: msg.triplestore_config.code_id.u64(), - label: format!("{}_triplestore", msg.name), - msg: to_json_binary(&axone_cognitarium::msg::InstantiateMsg { - limits: msg.triplestore_config.limits.into(), - })?, - funds: vec![], - salt, - })) -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn execute( - deps: DepsMut<'_>, - env: Env, - info: MessageInfo, - msg: ExecuteMsg, -) -> Result { - nonpayable(&info)?; - match msg { - ExecuteMsg::SubmitClaims { claims, format: _ } => { - execute::submit_claims(deps, env, info, claims) - } - _ => Err(StdError::generic_err("Not implemented").into()), - } -} - -pub mod execute { - use super::*; - use crate::credential::error::VerificationError; - use crate::credential::vc::VerifiableCredential; - use crate::registrar::credential::DataverseCredential; - use crate::registrar::registry::ClaimRegistrar; - use axone_rdf::dataset::Dataset; - use axone_rdf::serde::NQuadsReader; - use cosmwasm_std::ensure; - use std::io::BufReader; - - pub fn submit_claims( - deps: DepsMut<'_>, - env: Env, - info: MessageInfo, - claims: Binary, - ) -> Result { - let buf = BufReader::new(claims.as_slice()); - let mut reader = 
NQuadsReader::new(buf); - let rdf_quads = reader.read_all()?; - let vc_dataset = Dataset::from(rdf_quads.as_slice()); - let vc = VerifiableCredential::try_from(&vc_dataset)?; - - // check proofs if any. - // accept unverified credentials if the issuer matches the sender, as the transaction's - // signature serves as proof. - if !vc.proof.is_empty() { - vc.verify(&deps)?; - } else { - ensure!( - vc.is_issued_by(&info.sender), - VerificationError::NoSuitableProof - ); - } - - let credential = DataverseCredential::try_from((env, info, &vc))?; - let registrar = ClaimRegistrar::try_new(deps.storage)?; - - Ok(Response::default() - .add_attribute("action", "submit_claims") - .add_attribute("credential", credential.id) - .add_attribute("subject", credential.claim.id) - .add_attribute("type", credential.r#type) - .add_message(registrar.submit_claim(&deps, &credential)?)) - } -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn query(deps: Deps<'_>, _env: Env, msg: QueryMsg) -> StdResult { - match msg { - QueryMsg::Dataverse {} => to_json_binary(&query::dataverse(deps)?), - } -} - -pub mod query { - use crate::msg::DataverseResponse; - use crate::state::DATAVERSE; - use cosmwasm_std::{Deps, StdResult}; - - pub fn dataverse(deps: Deps<'_>) -> StdResult { - DATAVERSE.load(deps.storage).map(|d| DataverseResponse { - name: d.name, - triplestore_address: d.triplestore_address, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::msg::{ - DataverseResponse, RdfDatasetFormat, TripleStoreConfig, TripleStoreLimitsInput, - }; - use crate::testutil::testutil::read_test_data; - use axone_cognitarium::msg::{DataFormat, Head, Results, SelectResponse}; - use axone_cognitarium::parser::{ - Node, SelectItem, SelectQuery, TriplePattern, Value, VarOrNamedNode, VarOrNode, - VarOrNodeOrLiteral, WhereClause, IRI, - }; - use cosmwasm_std::testing::{message_info, mock_dependencies, mock_env}; - use cosmwasm_std::{ - coins, from_json, Addr, Attribute, Checksum, 
ContractResult, CosmosMsg, SubMsg, - SystemError, SystemResult, Uint128, Uint64, WasmQuery, - }; - use cw_utils::PaymentError::NonPayable; - use std::collections::BTreeMap; - use testing::addr::{addr, CREATOR, SENDER}; - use testing::mock::mock_env_addr; - - #[test] - fn proper_instantiate() { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(|query| match query { - WasmQuery::CodeInfo { code_id, .. } => { - let resp = CodeInfoResponse::new( - code_id.clone(), - addr(CREATOR), - Checksum::from_hex( - "3B94AAF0B7D804B5B458DED0D20CACF95D2A1C8DF78ED3C89B61291760454AEC", - ) - .unwrap(), - ); - SystemResult::Ok(ContractResult::Ok(to_json_binary(&resp).unwrap())) - } - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - let store_limits = TripleStoreLimitsInput { - max_byte_size: Some(Uint128::from(50000u128)), - ..Default::default() - }; - - let msg = InstantiateMsg { - name: "my-dataverse".to_string(), - triplestore_config: TripleStoreConfig { - code_id: Uint64::from(17u64), - limits: store_limits.clone(), - }, - }; - - let env = mock_env_addr(); - let info = message_info(&addr(CREATOR), &[]); - let res = instantiate(deps.as_mut(), env.clone(), info, msg).unwrap(); - - assert_eq!( - res.attributes, - vec![Attribute::new("triplestore_address", "predicted address")] - ); - assert_eq!( - res.messages, - vec![SubMsg::new(WasmMsg::Instantiate2 { - admin: Some(env.contract.address.to_string()), - code_id: 17, - label: "my-dataverse_triplestore".to_string(), - msg: to_json_binary(&axone_cognitarium::msg::InstantiateMsg { - limits: store_limits.into(), - }) - .unwrap(), - funds: vec![], - salt: Binary::from("my-dataverse".as_bytes()), - })] - ); - assert_eq!( - DATAVERSE.load(&deps.storage).unwrap(), - Dataverse { - name: "my-dataverse".to_string(), - triplestore_address: Addr::unchecked("predicted address"), - } - ) - } - - #[test] - fn funds_initialization() { - let mut deps = mock_dependencies(); - let env = mock_env(); - let info = 
message_info(&addr(SENDER), &coins(10, "uaxone")); - - let msg = InstantiateMsg { - name: "my-dataverse".to_string(), - triplestore_config: TripleStoreConfig { - code_id: Uint64::from(17u64), - limits: TripleStoreLimitsInput::default(), - }, - }; - - let result = instantiate(deps.as_mut(), env, info, msg); - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ContractError::Payment(NonPayable {}) - )); - } - - #[test] - fn proper_dataverse() { - let mut deps = mock_dependencies(); - - DATAVERSE - .save( - deps.as_mut().storage, - &Dataverse { - name: "my-dataverse".to_string(), - triplestore_address: Addr::unchecked("my-dataverse-addr"), - }, - ) - .unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Dataverse {}); - assert!(res.is_ok()); - let res: StdResult = from_json(res.unwrap()); - assert!(res.is_ok()); - assert_eq!( - res.unwrap(), - DataverseResponse { - name: "my-dataverse".to_string(), - triplestore_address: Addr::unchecked("my-dataverse-addr"), - } - ); - } - - #[test] - fn execute_fail_with_funds() { - let mut deps = mock_dependencies(); - let env = mock_env(); - let info = message_info(&addr(SENDER), &coins(10, "uaxone")); - - let msg = ExecuteMsg::SubmitClaims { - claims: Binary::from("data".as_bytes()), - format: Some(RdfDatasetFormat::NQuads), - }; - - let result = execute(deps.as_mut(), env, info, msg); - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ContractError::Payment(NonPayable {}) - )); - } - - #[test] - fn proper_submit_claims() { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(|query| match query { - WasmQuery::Smart { contract_addr, msg } => { - if contract_addr != "my-dataverse-addr" { - return SystemResult::Err(SystemError::NoSuchContract { - addr: contract_addr.to_string(), - }); - } - let query_msg: StdResult = from_json(msg); - assert_eq!( - query_msg, - Ok(axone_cognitarium::msg::QueryMsg::Select { - query: SelectQuery { - prefixes: vec![], - limit: 
Some(1u32), - select: vec![SelectItem::Variable("p".to_string())], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(IRI::Full( - "http://example.edu/credentials/3732".to_string(), - ))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }] - }, - } - }) - ); - - let select_resp = SelectResponse { - results: Results { bindings: vec![] }, - head: Head { vars: vec![] }, - }; - SystemResult::Ok(ContractResult::Ok(to_json_binary(&select_resp).unwrap())) - } - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - DATAVERSE - .save( - deps.as_mut().storage, - &Dataverse { - name: "my-dataverse".to_string(), - triplestore_address: Addr::unchecked("my-dataverse-addr"), - }, - ) - .unwrap(); - - let resp = execute( - deps.as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new(read_test_data("vc-eddsa-2020-ok.nq")), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - assert!(resp.is_ok()); - let resp = resp.unwrap(); - assert_eq!(resp.messages.len(), 1); - assert_eq!( - resp.attributes, - vec![ - Attribute::new("action", "submit_claims"), - Attribute::new("credential", "http://example.edu/credentials/3732"), - Attribute::new( - "subject", - "did:key:zDnaeUm3QkcyZWZTPttxB711jgqRDhkwvhF485SFw1bDZ9AQw" - ), - Attribute::new( - "type", - "https://example.org/examples#UniversityDegreeCredential" - ), - ] - ); - - let expected_data = r#" "12345"^^ . - "1571797419"^^ . - "axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0" . - . - . - "2024-02-16T00:00:00Z"^^ . - . - "3"^^ . -_:c0 _:b0 . -_:b0 "Bachelor of Science and Arts"^^ . -_:b0 . - _:c0 . - "2026-02-16T00:00:00Z"^^ . 
-"#; - - match resp.messages[0].msg.clone() { - CosmosMsg::Wasm(WasmMsg::Execute { - contract_addr, - msg, - funds, - }) if contract_addr == "my-dataverse-addr".to_string() && funds == vec![] => { - let exec_msg: StdResult = from_json(msg); - assert!(exec_msg.is_ok()); - match exec_msg.unwrap() { - axone_cognitarium::msg::ExecuteMsg::InsertData { format, data } => { - assert_eq!(format, Some(DataFormat::NTriples)); - assert_eq!(String::from_utf8(data.to_vec()).unwrap(), expected_data); - } - _ => assert!(false), - } - } - _ => assert!(false), - } - } - - #[test] - fn submit_nonrdf_claims() { - let resp = execute( - mock_dependencies().as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new("notrdf".as_bytes().to_vec()), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - assert!(resp.is_err()); - assert!(matches!(resp.err().unwrap(), ContractError::ParseRDF(_))) - } - - #[test] - fn submit_invalid_claims() { - let resp = execute( - mock_dependencies().as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new(vec![]), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - assert!(resp.is_err()); - assert!(matches!( - resp.err().unwrap(), - ContractError::InvalidCredential(_) - )) - } - - #[test] - fn submit_unverified_claims_matching_sender() { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(|query| match query { - WasmQuery::Smart { contract_addr, msg } => { - if contract_addr != "my-dataverse-addr" { - return SystemResult::Err(SystemError::NoSuchContract { - addr: contract_addr.to_string(), - }); - } - let query_msg: StdResult = from_json(msg); - assert_eq!( - query_msg, - Ok(axone_cognitarium::msg::QueryMsg::Select { - query: SelectQuery { - prefixes: vec![], - limit: Some(1u32), - select: 
vec![SelectItem::Variable("p".to_string())], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(IRI::Full( - "http://example.edu/credentials/3732".to_string(), - ))), - predicate: VarOrNamedNode::Variable("p".to_string()), - object: VarOrNodeOrLiteral::Variable("o".to_string()), - }] - }, - } - }) - ); - - let select_resp = SelectResponse { - results: Results { bindings: vec![] }, - head: Head { vars: vec![] }, - }; - SystemResult::Ok(ContractResult::Ok(to_json_binary(&select_resp).unwrap())) - } - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - DATAVERSE - .save( - deps.as_mut().storage, - &Dataverse { - name: "my-dataverse".to_string(), - triplestore_address: Addr::unchecked("my-dataverse-addr"), - }, - ) - .unwrap(); - - let resp = execute( - deps.as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone178mjppxcf3n9q3q7utdwrajdal0tsqvymz0900"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new(read_test_data("vc-eddsa-2020-ok-unsecured-trusted.nq")), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - assert!(resp.is_ok()); - } - - #[test] - fn submit_unverified_claims() { - let resp = execute( - mock_dependencies().as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new(read_test_data("vc-eddsa-2020-ok-unsecured.nq")), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - assert!(resp.is_err()); - assert!(matches!( - resp.err().unwrap(), - ContractError::CredentialVerification(_) - )) - } - - #[test] - fn submit_unsupported_claims() { - let resp = execute( - mock_dependencies().as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new(read_test_data("vc-unsupported-1.nq")), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - 
assert!(resp.is_err()); - assert!(matches!( - resp.err().unwrap(), - ContractError::UnsupportedCredential(_) - )) - } - - #[test] - fn submit_existing_claims() { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(|query| match query { - WasmQuery::Smart { .. } => { - let select_resp = SelectResponse { - results: Results { - bindings: vec![BTreeMap::from([( - "p".to_string(), - Value::BlankNode { - value: "".to_string(), - }, - )])], - }, - head: Head { vars: vec![] }, - }; - SystemResult::Ok(ContractResult::Ok(to_json_binary(&select_resp).unwrap())) - } - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - DATAVERSE - .save( - deps.as_mut().storage, - &Dataverse { - name: "my-dataverse".to_string(), - triplestore_address: Addr::unchecked("my-dataverse-addr"), - }, - ) - .unwrap(); - - let resp = execute( - deps.as_mut(), - mock_env(), - message_info( - &Addr::unchecked("axone1072nc6egexqr2v6vpp7yxwm68plvqnkf5uemr0"), - &[], - ), - ExecuteMsg::SubmitClaims { - claims: Binary::new(read_test_data("vc-eddsa-2020-ok.nq")), - format: Some(RdfDatasetFormat::NQuads), - }, - ); - - assert!(resp.is_err()); - assert!( - matches!(resp.err().unwrap(), ContractError::CredentialAlreadyExists(id) if id == "http://example.edu/credentials/3732") - ); - } -} diff --git a/contracts/axone-dataverse/src/credential/crypto.rs b/contracts/axone-dataverse/src/credential/crypto.rs deleted file mode 100644 index 8dde39a4..00000000 --- a/contracts/axone-dataverse/src/credential/crypto.rs +++ /dev/null @@ -1,125 +0,0 @@ -use crate::credential::error::VerificationError; -use crate::credential::proof::ProofMaterial; -use axone_rdf::normalize::Normalizer; -use base64::prelude::BASE64_URL_SAFE_NO_PAD; -use base64::Engine; -use cosmwasm_std::DepsMut; -use rio_api::model::Quad; -use sha2::Digest; - -pub enum CanonicalizationAlg { - Urdna2015, -} - -pub enum DigestAlg { - Sha256, -} - -pub enum SignatureAlg { - Ed25519, - Secp256k1, -} - -pub struct CryptoSuite { - canon: 
CanonicalizationAlg, - hash: DigestAlg, - sign: SignatureAlg, -} - -impl From<(CanonicalizationAlg, DigestAlg, SignatureAlg)> for CryptoSuite { - fn from(value: (CanonicalizationAlg, DigestAlg, SignatureAlg)) -> Self { - Self { - canon: value.0, - hash: value.1, - sign: value.2, - } - } -} - -impl CryptoSuite { - pub fn verify_document( - &self, - deps: &'_ DepsMut<'_>, - unsecured_doc: &[Quad<'_>], - proof_opts: &[Quad<'_>], - proof_material: ProofMaterial<'_>, - pub_key: &[u8], - ) -> Result<(), VerificationError> { - let unsecured_doc_canon = self.canonicalize(unsecured_doc)?; - let proof_opts_canon = self.canonicalize(proof_opts)?; - - let hash = [self.hash(proof_opts_canon), self.hash(unsecured_doc_canon)].concat(); - - match proof_material { - ProofMaterial::Signature(v) => self.verify(deps, &hash, v, pub_key), - ProofMaterial::Jws(jws) => { - let (headers_b64, signature_b64) = Self::explode_jws(jws)?; - let signature = BASE64_URL_SAFE_NO_PAD - .decode(signature_b64) - .map_err(|_| VerificationError::InvalidJws)?; - - let signing_input = [headers_b64, b".", &hash].concat(); - let signing_input = match self.sign { - SignatureAlg::Ed25519 => signing_input, - SignatureAlg::Secp256k1 => { - let mut hasher = sha2::Sha256::new(); - hasher.update(signing_input); - - hasher.finalize().to_vec() - } - }; - - self.verify(deps, &signing_input, &signature, pub_key) - } - } - } - - fn canonicalize(&self, unsecured_document: &[Quad<'_>]) -> Result { - match self.canon { - CanonicalizationAlg::Urdna2015 => { - let mut normalizer = Normalizer::new(); - normalizer - .normalize(unsecured_document) - .map_err(VerificationError::from) - } - } - } - - fn hash(&self, transformed_document: String) -> Vec { - match self.hash { - DigestAlg::Sha256 => { - let mut hasher = sha2::Sha256::new(); - hasher.update(transformed_document); - - hasher.finalize().to_vec() - } - } - } - - fn verify( - &self, - deps: &'_ DepsMut<'_>, - message: &[u8], - signature: &[u8], - pub_key: &[u8], - ) -> 
Result<(), VerificationError> { - match match self.sign { - SignatureAlg::Ed25519 => deps.api.ed25519_verify(message, signature, pub_key), - SignatureAlg::Secp256k1 => deps.api.secp256k1_verify(message, signature, pub_key), - } { - Ok(true) => Ok(()), - Ok(false) => Err(VerificationError::WrongSignature), - Err(e) => Err(VerificationError::from(e)), - } - } - - fn explode_jws(jws: &str) -> Result<(&[u8], &[u8]), VerificationError> { - let mut parts = jws.split('.'); - Ok( - match (parts.next(), parts.next(), parts.next(), parts.next()) { - (Some(headers), Some(_), Some(sig), None) => (headers.as_bytes(), sig.as_bytes()), - _ => Err(VerificationError::InvalidJws)?, - }, - ) - } -} diff --git a/contracts/axone-dataverse/src/credential/error.rs b/contracts/axone-dataverse/src/credential/error.rs deleted file mode 100644 index 35ac708f..00000000 --- a/contracts/axone-dataverse/src/credential/error.rs +++ /dev/null @@ -1,72 +0,0 @@ -use axone_rdf::normalize::NormalizationError; -use thiserror::Error; - -#[derive(Debug, Error, PartialEq)] -pub enum InvalidCredentialError { - #[error("Missing identifier")] - MissingIdentifier, - - #[error("Missing issuer")] - MissingIssuer, - - #[error("Missing issuance date")] - MissingIssuanceDate, - - #[error("Invalid proof: {0}")] - InvalidProof(#[from] InvalidProofError), - - #[error("Malformed: {0}")] - Malformed(String), -} - -#[derive(Debug, Error, PartialEq)] -pub enum InvalidProofError { - #[error("Missing proof type")] - MissingProofType, - - #[error("Missing verification method")] - MissingVerificationMethod, - - #[error("Missing created")] - MissingCreated, - - #[error("Missing proof purpose")] - MissingProofPurpose, - - #[error("Missing proof value")] - MissingProofValue, - - #[error("Missing proof cryptosuite")] - MissingProofCryptosuite, - - #[error("Malformed proof value: {0}")] - MalformedProofValue(#[from] multibase::Error), - - #[error("Could not decode public key")] - InvalidPubKey, - - #[error("Malformed: {0}")] - 
Malformed(String), - - // Used internally only - #[error("Unsupported proof type")] - Unsupported, -} - -#[derive(Debug, Error)] -pub enum VerificationError { - #[error("Couldn't canonicalize document: {0}")] - RdfCanonError(#[from] NormalizationError), - - #[error("Couldn't verify signature: {0}")] - SignatureError(#[from] cosmwasm_std::VerificationError), - - #[error("Invalid JWS")] - InvalidJws, - - #[error("Signature mismatch")] - WrongSignature, - - #[error("Couldn't find a suitable proof")] - NoSuitableProof, -} diff --git a/contracts/axone-dataverse/src/credential/mod.rs b/contracts/axone-dataverse/src/credential/mod.rs deleted file mode 100644 index 7f634366..00000000 --- a/contracts/axone-dataverse/src/credential/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod crypto; -pub mod error; -mod proof; -pub mod rdf_marker; -pub mod vc; diff --git a/contracts/axone-dataverse/src/credential/proof.rs b/contracts/axone-dataverse/src/credential/proof.rs deleted file mode 100644 index dd6710d4..00000000 --- a/contracts/axone-dataverse/src/credential/proof.rs +++ /dev/null @@ -1,660 +0,0 @@ -use crate::credential::crypto::{CanonicalizationAlg, CryptoSuite, DigestAlg, SignatureAlg}; -use crate::credential::error::InvalidProofError; -use crate::credential::rdf_marker::{ - PROOF_RDF_CRYPTOSUITE, PROOF_RDF_JWS, PROOF_RDF_PROOF_PURPOSE, PROOF_RDF_PROOF_VALUE, - PROOF_RDF_PROOF_VALUE_TYPE, PROOF_RDF_VERIFICATION_METHOD, RDF_CREATED, RDF_DATE_TYPE, - RDF_TYPE, -}; -use axone_rdf::dataset::{Dataset, QuadIterator}; -use itertools::Itertools; -use rio_api::model::{GraphName, Literal, NamedNode, Quad, Term}; - -#[derive(Debug, PartialEq)] -pub enum Proof<'a> { - Ed25519Signature2018(Ed25519Signature2018Proof<'a>), - Ed25519Signature2020(Ed25519Signature2020Proof<'a>), - EcdsaSecp256k1Signature2019(EcdsaSecp256k1Signature2019Proof<'a>), - DataIntegrity(DataIntegrityProof<'a>), -} - -#[derive(Debug, PartialEq)] -pub enum ProofMaterial<'a> { - Signature(&'a [u8]), - Jws(&'a str), -} - 
-impl<'a> Proof<'a> { - pub fn suitable(&self, issuer: &str, purpose: ProofPurpose) -> bool { - let (controller, proof_purpose) = match self { - Self::Ed25519Signature2018(proof) => { - (proof.verification_method.controller, proof.purpose) - } - Self::Ed25519Signature2020(proof) => { - (proof.verification_method.controller, proof.purpose) - } - Self::EcdsaSecp256k1Signature2019(proof) => { - (proof.verification_method.controller, proof.purpose) - } - Proof::DataIntegrity(proof) => (proof.verification_method.controller, proof.purpose), - }; - - controller == issuer && proof_purpose == purpose - } - - pub fn crypto_suite(&self) -> CryptoSuite { - match self { - Proof::Ed25519Signature2018(_) | Proof::Ed25519Signature2020(_) => ( - CanonicalizationAlg::Urdna2015, - DigestAlg::Sha256, - SignatureAlg::Ed25519, - ), - Proof::EcdsaSecp256k1Signature2019(_) => ( - CanonicalizationAlg::Urdna2015, - DigestAlg::Sha256, - SignatureAlg::Secp256k1, - ), - Proof::DataIntegrity(p) => ( - CanonicalizationAlg::Urdna2015, - DigestAlg::Sha256, - p.cryptosuite.into(), - ), - } - .into() - } - - pub fn pub_key(&'a self) -> &'a [u8] { - match self { - Proof::Ed25519Signature2018(p) => &p.verification_method.pub_key, - Proof::Ed25519Signature2020(p) => &p.verification_method.pub_key, - Proof::EcdsaSecp256k1Signature2019(p) => &p.verification_method.pub_key, - Proof::DataIntegrity(p) => &p.verification_method.pub_key, - } - } - - pub fn proof_material(&'a self) -> ProofMaterial<'a> { - match self { - Proof::Ed25519Signature2018(p) => ProofMaterial::Jws(p.jws), - Proof::Ed25519Signature2020(p) => ProofMaterial::Signature(p.value.as_slice()), - Proof::EcdsaSecp256k1Signature2019(p) => ProofMaterial::Jws(p.jws), - Proof::DataIntegrity(p) => ProofMaterial::Signature(p.value.as_slice()), - } - } - - pub fn options(&'a self) -> &'a [Quad<'a>] { - match self { - Proof::Ed25519Signature2018(p) => p.options.as_ref(), - Proof::Ed25519Signature2020(p) => p.options.as_ref(), - 
Proof::EcdsaSecp256k1Signature2019(p) => p.options.as_ref(), - Proof::DataIntegrity(p) => p.options.as_ref(), - } - } - - fn extract_verification_method( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - ) -> Result<&'a str, InvalidProofError> { - dataset - .match_pattern( - None, - Some(PROOF_RDF_VERIFICATION_METHOD), - None, - Some(Some(proof_graph)), - ) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidProofError::Malformed( - "Proof cannot have more than one verification method".to_string(), - ), - _ => InvalidProofError::MissingVerificationMethod, - }) - .and_then(|o| match o { - Term::NamedNode(n) => Ok(n.iri), - _ => Err(InvalidProofError::Malformed( - "verification method type must be a named node".to_string(), - )), - }) - } - - fn parse_verification_method(raw: &'a str) -> Result<(&'a str, &'a str), InvalidProofError> { - Ok(match raw.split('#').collect::>()[..] { - [controller, key] => match controller.split(':').collect::>()[..] 
{ - ["did", "key", controller_key] if controller_key == key => (controller, key), - _ => Err(InvalidProofError::Malformed( - "couldn't parse did key for verification method".to_string(), - ))?, - }, - _ => Err(InvalidProofError::Malformed( - "couldn't parse did key for verification method".to_string(), - ))?, - }) - } - - fn extract_created( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - ) -> Result<&'a str, InvalidProofError> { - dataset - .match_pattern(None, Some(RDF_CREATED), None, Some(Some(proof_graph))) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidProofError::Malformed( - "Proof cannot have more than one created date".to_string(), - ), - _ => InvalidProofError::MissingCreated, - }) - .and_then(|o| match o { - Term::Literal(Literal::Typed { value, datatype }) if datatype == RDF_DATE_TYPE => { - Ok(value) - } - _ => Err(InvalidProofError::Malformed( - "Proof created date must be a date".to_string(), - )), - }) - } - - fn extract_proof_purpose( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - ) -> Result<&'a str, InvalidProofError> { - dataset - .match_pattern( - None, - Some(PROOF_RDF_PROOF_PURPOSE), - None, - Some(Some(proof_graph)), - ) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidProofError::Malformed( - "Proof cannot have more than one proof purpose".to_string(), - ), - _ => InvalidProofError::MissingProofPurpose, - }) - .and_then(|o| match o { - Term::NamedNode(n) => Ok(n.iri), - _ => Err(InvalidProofError::Malformed( - "proof purpose type must be a named node".to_string(), - )), - }) - } - - fn extract_proof_value( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - ) -> Result<&'a str, InvalidProofError> { - dataset - .match_pattern( - None, - Some(PROOF_RDF_PROOF_VALUE), - None, - Some(Some(proof_graph)), - ) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidProofError::Malformed( - 
"Proof cannot have more than one proof value".to_string(), - ), - _ => InvalidProofError::MissingProofValue, - }) - .and_then(|o| match o { - Term::Literal(Literal::Typed { value, datatype }) - if datatype == PROOF_RDF_PROOF_VALUE_TYPE => - { - Ok(value) - } - _ => Err(InvalidProofError::Malformed( - "Proof value must be a multibase".to_string(), - )), - }) - } - - fn extract_jws( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - ) -> Result<&'a str, InvalidProofError> { - dataset - .match_pattern(None, Some(PROOF_RDF_JWS), None, Some(Some(proof_graph))) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidProofError::Malformed( - "Proof cannot have more than one proof jws".to_string(), - ), - _ => InvalidProofError::MissingProofValue, - }) - .and_then(|o| match o { - Term::Literal(Literal::Simple { value }) => Ok(value), - _ => Err(InvalidProofError::Malformed( - "Proof jws must be a string".to_string(), - )), - }) - } - - fn extract_proof_options( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - value_predicate: NamedNode<'a>, - ) -> Dataset<'a> { - Dataset::new( - dataset - .match_pattern(None, None, None, Some(Some(proof_graph))) - .skip_pattern((None, Some(value_predicate), None, None).into()) - .map(|quad| Quad { - subject: quad.subject, - predicate: quad.predicate, - object: quad.object, - graph_name: None, - }) - .collect(), - ) - } -} - -impl<'a> TryFrom<(&'a Dataset<'a>, GraphName<'a>)> for Proof<'a> { - type Error = InvalidProofError; - - fn try_from( - (dataset, proof_graph): (&'a Dataset<'a>, GraphName<'a>), - ) -> Result { - let proof_type = dataset - .match_pattern(None, Some(RDF_TYPE), None, Some(Some(proof_graph))) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => { - InvalidProofError::Malformed("Proof cannot have more than one type".to_string()) - } - _ => InvalidProofError::MissingProofType, - }) - .and_then(|o| match o { - Term::NamedNode(n) => 
Ok(n.iri), - _ => Err(InvalidProofError::Malformed( - "Proof type must be a named node".to_string(), - )), - })?; - - match proof_type { - "https://w3id.org/security#Ed25519Signature2018" => Ok(Self::Ed25519Signature2018( - Ed25519Signature2018Proof::try_from((dataset, proof_graph))?, - )), - "https://w3id.org/security#Ed25519Signature2020" => Ok(Self::Ed25519Signature2020( - Ed25519Signature2020Proof::try_from((dataset, proof_graph))?, - )), - "https://w3id.org/security#EcdsaSecp256k1Signature2019" => { - Ok(Self::EcdsaSecp256k1Signature2019( - EcdsaSecp256k1Signature2019Proof::try_from((dataset, proof_graph))?, - )) - } - "https://w3id.org/security#DataIntegrityProof" => Ok(Self::DataIntegrity( - DataIntegrityProof::try_from((dataset, proof_graph))?, - )), - _ => Err(InvalidProofError::Unsupported), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum ProofPurpose { - AssertionMethod, - Unused, -} - -impl<'a> From<&'a str> for ProofPurpose { - fn from(value: &'a str) -> Self { - match value { - "https://w3id.org/security#assertionMethod" => ProofPurpose::AssertionMethod, - _ => ProofPurpose::Unused, - } - } -} - -#[derive(Debug, PartialEq)] -pub struct Ed25519Signature2018Proof<'a> { - // The verification method format being the same as the 2020 signature proof we reuse it. 
- verification_method: Ed25519VerificationKey2020<'a>, - created: &'a str, - purpose: ProofPurpose, - jws: &'a str, - options: Dataset<'a>, -} - -impl<'a> TryFrom<(&'a Dataset<'a>, GraphName<'a>)> for Ed25519Signature2018Proof<'a> { - type Error = InvalidProofError; - - fn try_from( - (dataset, proof_graph): (&'a Dataset<'a>, GraphName<'a>), - ) -> Result { - let v_method = Proof::extract_verification_method(dataset, proof_graph)?; - let p_purpose = Proof::extract_proof_purpose(dataset, proof_graph)?; - - Ok(Self { - verification_method: v_method.try_into()?, - created: Proof::extract_created(dataset, proof_graph)?, - purpose: p_purpose.into(), - jws: Proof::extract_jws(dataset, proof_graph)?, - options: Proof::extract_proof_options(dataset, proof_graph, PROOF_RDF_JWS), - }) - } -} - -#[derive(Debug, PartialEq)] -pub struct Ed25519Signature2020Proof<'a> { - verification_method: Ed25519VerificationKey2020<'a>, - created: &'a str, - purpose: ProofPurpose, - value: Vec, - options: Dataset<'a>, -} - -impl<'a> TryFrom<(&'a Dataset<'a>, GraphName<'a>)> for Ed25519Signature2020Proof<'a> { - type Error = InvalidProofError; - - fn try_from( - (dataset, proof_graph): (&'a Dataset<'a>, GraphName<'a>), - ) -> Result { - let v_method = Proof::extract_verification_method(dataset, proof_graph)?; - let p_purpose = Proof::extract_proof_purpose(dataset, proof_graph)?; - let (_, p_value) = multibase::decode(Proof::extract_proof_value(dataset, proof_graph)?) 
- .map_err(InvalidProofError::from)?; - - Ok(Self { - verification_method: v_method.try_into()?, - created: Proof::extract_created(dataset, proof_graph)?, - purpose: p_purpose.into(), - value: p_value, - options: Proof::extract_proof_options(dataset, proof_graph, PROOF_RDF_PROOF_VALUE), - }) - } -} - -#[derive(Debug, PartialEq)] -pub struct Ed25519VerificationKey2020<'a> { - id: &'a str, - controller: &'a str, - pub_key: Vec, -} - -impl<'a> TryFrom<&'a str> for Ed25519VerificationKey2020<'a> { - type Error = InvalidProofError; - - fn try_from(value: &'a str) -> Result { - let (controller, key) = Proof::parse_verification_method(value)?; - Ok(Self { - id: value, - controller, - pub_key: multiformats::decode_ed25519_key(key)?, - }) - } -} - -#[derive(Debug, PartialEq)] -pub struct EcdsaSecp256k1Signature2019Proof<'a> { - verification_method: EcdsaSecp256k1VerificationKey2019<'a>, - created: &'a str, - purpose: ProofPurpose, - jws: &'a str, - options: Dataset<'a>, -} - -impl<'a> TryFrom<(&'a Dataset<'a>, GraphName<'a>)> for EcdsaSecp256k1Signature2019Proof<'a> { - type Error = InvalidProofError; - - fn try_from( - (dataset, proof_graph): (&'a Dataset<'a>, GraphName<'a>), - ) -> Result { - let v_method = Proof::extract_verification_method(dataset, proof_graph)?; - let p_purpose = Proof::extract_proof_purpose(dataset, proof_graph)?; - - Ok(Self { - verification_method: v_method.try_into()?, - created: Proof::extract_created(dataset, proof_graph)?, - purpose: p_purpose.into(), - jws: Proof::extract_jws(dataset, proof_graph)?, - options: Proof::extract_proof_options(dataset, proof_graph, PROOF_RDF_JWS), - }) - } -} - -#[derive(Debug, PartialEq)] -pub struct EcdsaSecp256k1VerificationKey2019<'a> { - id: &'a str, - controller: &'a str, - pub_key: Vec, -} - -impl<'a> TryFrom<&'a str> for EcdsaSecp256k1VerificationKey2019<'a> { - type Error = InvalidProofError; - - fn try_from(value: &'a str) -> Result { - let (controller, key) = Proof::parse_verification_method(value)?; - 
Ok(Self { - id: value, - controller, - pub_key: multiformats::decode_secp256k1_key(key)?, - }) - } -} - -#[derive(Debug, PartialEq)] -pub struct DataIntegrityProof<'a> { - cryptosuite: DataIntegrityCryptoSuite, - verification_method: Multikey<'a>, - created: &'a str, - purpose: ProofPurpose, - value: Vec, - options: Dataset<'a>, -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum DataIntegrityCryptoSuite { - EddsaRdfc2022, -} - -impl From for SignatureAlg { - fn from(value: DataIntegrityCryptoSuite) -> Self { - match value { - DataIntegrityCryptoSuite::EddsaRdfc2022 => SignatureAlg::Ed25519, - } - } -} - -impl<'a> DataIntegrityProof<'a> { - fn extract_cryptosuite( - dataset: &'a Dataset<'a>, - proof_graph: GraphName<'a>, - ) -> Result { - dataset - .match_pattern( - None, - Some(PROOF_RDF_CRYPTOSUITE), - None, - Some(Some(proof_graph)), - ) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidProofError::Malformed( - "Proof cannot have more than one proof cryptosuite".to_string(), - ), - _ => InvalidProofError::MissingProofCryptosuite, - }) - .and_then(|o| match o { - Term::Literal(Literal::Simple { value }) - | Term::Literal(Literal::Typed { value, datatype: _ }) => Ok(value), - _ => Err(InvalidProofError::Malformed( - "Proof cryptosuite must be a cryptosuite string".to_string(), - )), - }) - .and_then(|suite| { - Ok(match suite { - "eddsa-rdfc-2022" | "eddsa-2022" => DataIntegrityCryptoSuite::EddsaRdfc2022, - _ => Err(InvalidProofError::Malformed( - "Proof cryptosuite unknown or unsupported".to_string(), - ))?, - }) - }) - } -} - -impl<'a> TryFrom<(&'a Dataset<'a>, GraphName<'a>)> for DataIntegrityProof<'a> { - type Error = InvalidProofError; - - fn try_from( - (dataset, proof_graph): (&'a Dataset<'a>, GraphName<'a>), - ) -> Result { - let cryptosuite = DataIntegrityProof::extract_cryptosuite(dataset, proof_graph)?; - let v_method = Proof::extract_verification_method(dataset, proof_graph)?; - let p_purpose = 
Proof::extract_proof_purpose(dataset, proof_graph)?; - let (_, p_value) = multibase::decode(Proof::extract_proof_value(dataset, proof_graph)?) - .map_err(InvalidProofError::from)?; - - Ok(Self { - cryptosuite, - verification_method: (v_method, cryptosuite).try_into()?, - created: Proof::extract_created(dataset, proof_graph)?, - purpose: p_purpose.into(), - value: p_value, - options: Proof::extract_proof_options(dataset, proof_graph, PROOF_RDF_PROOF_VALUE), - }) - } -} - -#[derive(Debug, PartialEq)] -pub struct Multikey<'a> { - id: &'a str, - controller: &'a str, - pub_key: Vec, -} - -impl<'a> TryFrom<(&'a str, DataIntegrityCryptoSuite)> for Multikey<'a> { - type Error = InvalidProofError; - - fn try_from( - (value, cryptosuite): (&'a str, DataIntegrityCryptoSuite), - ) -> Result { - let (controller, key) = Proof::parse_verification_method(value)?; - Ok(Self { - id: value, - controller, - pub_key: match cryptosuite { - DataIntegrityCryptoSuite::EddsaRdfc2022 => multiformats::decode_ed25519_key(key), - }?, - }) - } -} - -mod multiformats { - use crate::credential::error::InvalidProofError; - use multibase::Base; - - pub fn decode_ed25519_key(src: &str) -> Result, InvalidProofError> { - let (base, data) = multibase::decode(src).map_err(|_| InvalidProofError::InvalidPubKey)?; - if base != Base::Base58Btc { - Err(InvalidProofError::InvalidPubKey)?; - } - - let (codec, key) = - unsigned_varint::decode::u16(&data).map_err(|_| InvalidProofError::InvalidPubKey)?; - if codec != 0xed { - Err(InvalidProofError::InvalidPubKey)?; - } - - Ok(key.to_vec()) - } - - pub fn decode_secp256k1_key(src: &str) -> Result, InvalidProofError> { - let (base, data) = multibase::decode(src).map_err(|_| InvalidProofError::InvalidPubKey)?; - if base != Base::Base58Btc { - Err(InvalidProofError::InvalidPubKey)?; - } - - let (codec, key) = - unsigned_varint::decode::u16(&data).map_err(|_| InvalidProofError::InvalidPubKey)?; - if codec != 0xe7 { - Err(InvalidProofError::InvalidPubKey)?; - } - - 
Ok(key.to_vec()) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::testutil::testutil; - use base64::prelude::BASE64_STANDARD; - use base64::Engine; - use rio_api::model::BlankNode; - - #[test] - fn proof_from_dataset() { - let quads = testutil::read_test_quads("proof-ed255192020-options.nq"); - let proof_ok_options = Dataset::from(quads.as_slice()); - - let cases: Vec<(&str, Result, InvalidProofError>)> = vec![ - ( - "proof-ed255192020-ok.nq", - Ok(Proof::Ed25519Signature2020(Ed25519Signature2020Proof { - created: "2023-11-29T10:07:56Z", - verification_method: Ed25519VerificationKey2020 { - id: "did:key:z6MkqxFfjh6HNFuNSGmqVDJxL4fcdbcBco7CNHBLjEo125wu#z6MkqxFfjh6HNFuNSGmqVDJxL4fcdbcBco7CNHBLjEo125wu", - controller: "did:key:z6MkqxFfjh6HNFuNSGmqVDJxL4fcdbcBco7CNHBLjEo125wu", - pub_key: BASE64_STANDARD.decode("qt35Ph/BPVyvU0YhVdJ47m0p6APFYPoC5V5C7s5cdyg=").unwrap(), - }, - purpose: ProofPurpose::AssertionMethod, - value: BASE64_STANDARD.decode("371GN4kfgVEWv3/QY9qx1buNm9gYJGWgYOgMSVKOsnoJekPoQV2fjqR+3XMjd3avpQlARFyD/3a0J5tUS4aBCQ==").unwrap(), - options: proof_ok_options, - })), - ), - ( - "proof-invalid-pkey.nq", - Err(InvalidProofError::InvalidPubKey), - ), - ( - "proof-malformed.nq", - Err(InvalidProofError::Malformed("Proof type must be a named node".to_string())), - ), - ( - "proof-malformed-value.nq", - Err(InvalidProofError::MalformedProofValue(multibase::Error::UnknownBase('5'))), - ), - ( - "proof-missing-created.nq", - Err(InvalidProofError::MissingCreated), - ), - ( - "proof-missing-method.nq", - Err(InvalidProofError::MissingVerificationMethod), - ), - ( - "proof-missing-purpose.nq", - Err(InvalidProofError::MissingProofPurpose), - ), - ( - "proof-missing-type.nq", - Err(InvalidProofError::MissingProofType), - ), - ( - "proof-missing-value.nq", - Err(InvalidProofError::MissingProofValue), - ), - ( - "proof-unsupported.nq", - Err(InvalidProofError::Unsupported), - ), - ]; - - for (test_file, expected) in cases { - let owned_quads = 
testutil::read_test_quads(test_file); - let dataset = Dataset::from(owned_quads.as_slice()); - - let proof_res = - Proof::try_from((&dataset, GraphName::BlankNode(BlankNode { id: "b0" }))); - assert_eq!(proof_res, expected) - } - } -} diff --git a/contracts/axone-dataverse/src/credential/rdf_marker.rs b/contracts/axone-dataverse/src/credential/rdf_marker.rs deleted file mode 100644 index a92a99dc..00000000 --- a/contracts/axone-dataverse/src/credential/rdf_marker.rs +++ /dev/null @@ -1,59 +0,0 @@ -use rio_api::model::{NamedNode, Term}; - -pub const RDF_TYPE: NamedNode<'_> = NamedNode { - iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type", -}; -pub const RDF_CREATED: NamedNode<'_> = NamedNode { - iri: "http://purl.org/dc/terms/created", -}; -pub const RDF_DATE_TYPE: NamedNode<'_> = NamedNode { - iri: "http://www.w3.org/2001/XMLSchema#dateTime", -}; - -pub const RDF_UNSIGNED_INT: NamedNode<'_> = NamedNode { - iri: "http://www.w3.org/2001/XMLSchema#unsignedInt", -}; - -pub const RDF_UNSIGNED_LONG: NamedNode<'_> = NamedNode { - iri: "http://www.w3.org/2001/XMLSchema#unsignedLong", -}; - -pub const IRI_VC_TYPE: &str = "https://www.w3.org/2018/credentials#VerifiableCredential"; -pub const VC_RDF_TYPE: Term<'_> = Term::NamedNode(NamedNode { iri: IRI_VC_TYPE }); -pub const VC_RDF_ISSUER: NamedNode<'_> = NamedNode { - iri: "https://www.w3.org/2018/credentials#issuer", -}; -pub const VC_RDF_ISSUANCE_DATE: NamedNode<'_> = NamedNode { - iri: "https://www.w3.org/2018/credentials#issuanceDate", -}; -pub const VC_RDF_EXPIRATION_DATE: NamedNode<'_> = NamedNode { - iri: "https://www.w3.org/2018/credentials#expirationDate", -}; -pub const VC_RDF_CREDENTIAL_SUBJECT: NamedNode<'_> = NamedNode { - iri: "https://www.w3.org/2018/credentials#credentialSubject", -}; -pub const VC_RDF_CREDENTIAL_STATUS: NamedNode<'_> = NamedNode { - iri: "https://www.w3.org/2018/credentials#credentialStatus", -}; - -pub const VC_RDF_PROOF: NamedNode<'_> = NamedNode { - iri: 
"https://w3id.org/security#proof", -}; -pub const PROOF_RDF_VERIFICATION_METHOD: NamedNode<'_> = NamedNode { - iri: "https://w3id.org/security#verificationMethod", -}; -pub const PROOF_RDF_PROOF_PURPOSE: NamedNode<'_> = NamedNode { - iri: "https://w3id.org/security#proofPurpose", -}; -pub const PROOF_RDF_PROOF_VALUE: NamedNode<'_> = NamedNode { - iri: "https://w3id.org/security#proofValue", -}; -pub const PROOF_RDF_JWS: NamedNode<'_> = NamedNode { - iri: "https://w3id.org/security#jws", -}; -pub const PROOF_RDF_PROOF_VALUE_TYPE: NamedNode<'_> = NamedNode { - iri: "https://w3id.org/security#multibase", -}; -pub const PROOF_RDF_CRYPTOSUITE: NamedNode<'_> = NamedNode { - iri: "https://w3id.org/security#cryptosuite", -}; diff --git a/contracts/axone-dataverse/src/credential/vc.rs b/contracts/axone-dataverse/src/credential/vc.rs deleted file mode 100644 index c806bdbd..00000000 --- a/contracts/axone-dataverse/src/credential/vc.rs +++ /dev/null @@ -1,490 +0,0 @@ -use crate::credential::error::{InvalidCredentialError, InvalidProofError, VerificationError}; -use crate::credential::proof::{Proof, ProofPurpose}; -use crate::credential::rdf_marker::*; -use axone_rdf::dataset::QuadIterator; -use axone_rdf::dataset::{Dataset, QuadPattern}; -use bech32::Bech32; -use cosmwasm_std::{Addr, DepsMut}; -use itertools::Itertools; -use rio_api::model::{BlankNode, Literal, NamedNode, Subject, Term}; -use ripemd::Ripemd160; -use sha2::Digest; - -#[derive(Debug, PartialEq)] -pub struct VerifiableCredential<'a> { - pub id: &'a str, - pub types: Vec<&'a str>, - pub issuer: &'a str, - pub issuance_date: &'a str, - pub expiration_date: Option<&'a str>, - pub claims: Vec>, - pub status: Option>, - pub proof: Vec>, - unsecured_document: Dataset<'a>, -} - -#[derive(Debug, PartialEq)] -pub struct Claim<'a> { - pub id: &'a str, - pub content: Dataset<'a>, -} - -#[derive(Debug, PartialEq)] -pub struct Status<'a> { - id: &'a str, - type_: &'a str, - content: Dataset<'a>, -} - -impl<'a> TryFrom<&'a 
Dataset<'a>> for VerifiableCredential<'a> { - type Error = InvalidCredentialError; - - fn try_from(dataset: &'a Dataset<'a>) -> Result { - let id = Self::extract_identifier(dataset)?; - - let (proofs, proof_graphs): (Vec>, Vec>) = - Self::extract_proofs(dataset, id)?.into_iter().unzip(); - - let mut unsecured_filter: Vec> = proof_graphs - .into_iter() - .map(|g| (None, None, None, Some(Some(g.into()))).into()) - .collect(); - - unsecured_filter.push((Some(id.into()), Some(VC_RDF_PROOF), None, None).into()); - - Ok(Self { - id: id.iri, - types: Self::extract_types(dataset, id)?, - issuer: Self::extract_issuer(dataset, id)?.iri, - issuance_date: Self::extract_issuance_date(dataset, id)?, - expiration_date: Self::extract_expiration_date(dataset, id)?, - claims: Self::extract_claims(dataset, id)?, - status: Self::extract_status(dataset, id)?, - proof: proofs, - unsecured_document: Dataset::new( - dataset - .iter() - .skip_patterns(unsecured_filter) - .copied() - .collect(), - ), - }) - } -} - -impl<'a> VerifiableCredential<'a> { - pub fn verify(&self, deps: &'_ DepsMut<'_>) -> Result<(), VerificationError> { - let proof = self - .proof - .iter() - .find(|p| p.suitable(self.issuer, ProofPurpose::AssertionMethod)) - .ok_or(VerificationError::NoSuitableProof)?; - - let crypto_suite = proof.crypto_suite(); - crypto_suite.verify_document( - deps, - self.unsecured_document.as_ref(), - proof.options(), - proof.proof_material(), - proof.pub_key(), - ) - } - - // Tells if the credential was issued by the given address. 
- pub fn is_issued_by(&self, addr: &Addr) -> bool { - const SECP256K1PUB_MULTICODEC_PREFIX: [u8; 2] = [0xe7, 0x01]; - const ED25519PUB_MULTICODEC_PREFIX: [u8; 2] = [0xed, 0x01]; - const PREFIX: &str = "did:key:"; - - if !self.issuer.starts_with(PREFIX) { - return false; - } - - let encoded = &self.issuer[PREFIX.len()..]; - let decoded = match multibase::decode(encoded) { - Ok((_, bytes)) => bytes, - Err(_) => return false, - }; - - let (prefix, pubkey) = decoded.split_at(2); - if prefix != SECP256K1PUB_MULTICODEC_PREFIX && prefix != ED25519PUB_MULTICODEC_PREFIX { - return false; - } - - let (hrp, _) = match bech32::decode(addr.as_str()) { - Ok(decoded) => decoded, - Err(_) => return false, - }; - - let pubkey_hash = { - let hash = sha2::Sha256::digest(pubkey); - - Ripemd160::digest(hash) - }; - - let bech32_addr = match bech32::encode::(hrp, &pubkey_hash) { - Ok(addr) => addr, - Err(_) => return false, - }; - - bech32_addr == addr.as_str() - } - - fn extract_identifier( - dataset: &'a Dataset<'a>, - ) -> Result, InvalidCredentialError> { - dataset - .match_pattern(None, Some(RDF_TYPE), Some(VC_RDF_TYPE), None) - .subjects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidCredentialError::Malformed( - "Credential cannot have more than one id".to_string(), - ), - _ => InvalidCredentialError::MissingIdentifier, - }) - .and_then(|s| match s { - Subject::NamedNode(n) => Ok(n), - _ => Err(InvalidCredentialError::Malformed( - "Credential identifier must be a named node".to_string(), - )), - }) - } - - fn extract_types( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, - ) -> Result, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(RDF_TYPE), None, None) - .objects() - .map(|o| match o { - Term::NamedNode(n) => Ok(n.iri), - _ => Err(InvalidCredentialError::Malformed( - "Credential type must be a named node".to_string(), - )), - }) - .collect() - } - - fn extract_issuer( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, 
- ) -> Result, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(VC_RDF_ISSUER), None, None) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidCredentialError::MissingIssuer, - _ => InvalidCredentialError::Malformed( - "Credential cannot have more than one issuer".to_string(), - ), - }) - .and_then(|o| match o { - Term::NamedNode(n) => Ok(n), - _ => Err(InvalidCredentialError::Malformed( - "Credential issuer must be a named node".to_string(), - )), - }) - } - - fn extract_issuance_date( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, - ) -> Result<&'a str, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(VC_RDF_ISSUANCE_DATE), None, None) - .objects() - .exactly_one() - .map_err(|e| match e.size_hint() { - (_, Some(_)) => InvalidCredentialError::MissingIssuanceDate, - _ => InvalidCredentialError::Malformed( - "Credential cannot have more than one issuance date".to_string(), - ), - }) - .and_then(|o| match o { - Term::Literal(Literal::Typed { value, datatype }) if datatype == RDF_DATE_TYPE => { - Ok(value) - } - _ => Err(InvalidCredentialError::Malformed( - "Credential issuance date must be a date".to_string(), - )), - }) - } - - fn extract_expiration_date( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, - ) -> Result, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(VC_RDF_EXPIRATION_DATE), None, None) - .objects() - .at_most_one() - .map_err(|_| { - InvalidCredentialError::Malformed( - "Credential cannot have more than one expiration date".to_string(), - ) - }) - .and_then(|o| match o { - Some(t) => match t { - Term::Literal(Literal::Typed { value, datatype }) - if datatype == RDF_DATE_TYPE => - { - Ok(Some(value)) - } - _ => Err(InvalidCredentialError::Malformed( - "Credential expiration date must be a date".to_string(), - )), - }, - None => Ok(None), - }) - } - - fn extract_claims( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, - ) -> 
Result>, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(VC_RDF_CREDENTIAL_SUBJECT), None, None) - .objects() - .map(|claim_id| match claim_id { - Term::NamedNode(n) => Ok(n), - _ => Err(InvalidCredentialError::Malformed( - "Credential claim ids must be named nodes".to_string(), - )), - }) - .map_ok(|claim_id| Claim { - id: claim_id.iri, - content: dataset.sub_graph(claim_id.into()), - }) - .collect() - } - - fn extract_status( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, - ) -> Result>, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(VC_RDF_CREDENTIAL_STATUS), None, None) - .objects() - .at_most_one() - .map_err(|_| { - InvalidCredentialError::Malformed( - "Credential cannot have more than one expiration date".to_string(), - ) - }) - .and_then(|maybe_term| match maybe_term { - Some(term) => match term { - Term::NamedNode(n) => Ok(Some(Status { - id: n.iri, - type_: Self::extract_types(dataset, n)? - .iter() - .exactly_one() - .map_err(|_| { - InvalidCredentialError::Malformed( - "Credential status can only have one type".to_string(), - ) - })?, - content: Dataset::new( - dataset - .match_pattern(Some(n.into()), None, None, None) - .copied() - .collect(), - ), - })), - _ => Err(InvalidCredentialError::Malformed( - "Credential status id must be a named node".to_string(), - )), - }, - None => Ok(None), - }) - } - - fn extract_proofs( - dataset: &'a Dataset<'a>, - id: NamedNode<'a>, - ) -> Result, BlankNode<'a>)>, InvalidCredentialError> { - dataset - .match_pattern(Some(id.into()), Some(VC_RDF_PROOF), None, None) - .objects() - .filter_map(|o| match o { - Term::BlankNode(n) => { - let proof_res = Proof::try_from((dataset, n.into())); - match proof_res { - Err(InvalidProofError::Unsupported) => None, - _ => Some( - proof_res - .map(|p| (p, n)) - .map_err(InvalidCredentialError::from), - ), - } - } - _ => Some(Err(InvalidCredentialError::Malformed( - "Credential proof must be encapsulated in blank node graph 
names".to_string(), - ))), - }) - .collect() - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::testutil::testutil; - use cosmwasm_std::testing::mock_dependencies; - use rio_api::model::Quad; - - #[test] - fn proper_vc_from_dataset() { - let owned_quads = testutil::read_test_quads("vc-eddsa-2020-ok-unsecured.nq"); - let unsecure_dataset = Dataset::from(owned_quads.as_slice()); - - let owned_quads = testutil::read_test_quads("vc-eddsa-2020-ok.nq"); - let dataset = Dataset::from(owned_quads.as_slice()); - - let vc_res = VerifiableCredential::try_from(&dataset); - assert!(vc_res.is_ok()); - let vc = vc_res.unwrap(); - assert_eq!(vc.id, "http://example.edu/credentials/3732"); - assert_eq!( - vc.types, - vec![ - "https://example.org/examples#UniversityDegreeCredential", - "https://www.w3.org/2018/credentials#VerifiableCredential" - ] - ); - assert_eq!( - vc.issuer, - "did:key:z6MkpwdnLPAm4apwcrRYQ6fZ3rAcqjLZR4AMk14vimfnozqY" - ); - assert_eq!(vc.issuance_date, "2024-02-16T00:00:00Z"); - assert_eq!(vc.expiration_date, Some("2026-02-16T00:00:00Z")); - assert_eq!( - vc.claims, - vec![Claim { - id: "did:key:zDnaeUm3QkcyZWZTPttxB711jgqRDhkwvhF485SFw1bDZ9AQw", - content: Dataset::new(vec![ - Quad { - subject: NamedNode { - iri: "did:key:zDnaeUm3QkcyZWZTPttxB711jgqRDhkwvhF485SFw1bDZ9AQw" - } - .into(), - predicate: NamedNode { - iri: "https://example.org/examples#degree" - }, - object: BlankNode { id: "b2" }.into(), - graph_name: None - }, - Quad { - subject: BlankNode { id: "b2" }.into(), - predicate: NamedNode { - iri: "http://schema.org/name" - }, - object: Literal::Typed { - value: "Bachelor of Science and Arts", - datatype: NamedNode { - iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#HTML" - } - } - .into(), - graph_name: None - }, - Quad { - subject: BlankNode { id: "b2" }.into(), - predicate: NamedNode { - iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" - }, - object: NamedNode { - iri: "https://example.org/examples#BachelorDegree" - } - .into(), - 
graph_name: None - } - ]) - }] - ); - assert_eq!(vc.status, None); - assert_eq!(vc.proof.len(), 1usize); - assert_eq!(vc.unsecured_document, unsecure_dataset); - } - - #[test] - fn is_issued_by() { - struct TC<'a> { - issuer: &'a str, - addr: Addr, - expected: bool, - } - let cases = vec![ - TC { - issuer: "did:key:zQ3shsoarhrw7SoyUyoCbwv8k2BRRLeqkjRkffGqx7WNpMQgw", - addr: Addr::unchecked("axone178mjppxcf3n9q3q7utdwrajdal0tsqvymz0900"), - expected: true, - }, - TC { - issuer: "did:key:z6MkvMXwgwJTJacfBGk5fxr3d4k3uzh4eHTi3oFagNyK55Tt", - addr: Addr::unchecked("axone1el0ln38j0qvkr22pwztelv3hxrsc0gx5zjsfky"), - expected: true, - }, - TC { - issuer: "did:key:zQ3shsoarhrw7SoyUyoCbwv8k2BRRLeqkjRkffGqx7WNpMQgw", - addr: Addr::unchecked("axone1hg3htshrh9xnhrmwrxnfnu0dtlx6345lu2c09f"), - expected: false, - }, - TC { - issuer: "did:foo:zQ3shsoarhrw7SoyUyoCbwv8k2BRRLeqkjRkffGqx7WNpMQgw", - addr: Addr::unchecked("axone178mjppxcf3n9q3q7utdwrajdal0tsqvymz0900"), - expected: false, - }, - TC { - issuer: "did:key:foo", - addr: Addr::unchecked("axone178mjppxcf3n9q3q7utdwrajdal0tsqvymz0900"), - expected: false, - }, - TC { - issuer: "did:key:z6LSeu9HkTHSfLLeUs2nnzUSNedgDUevfNQgQjQC23ZCit6F", - addr: Addr::unchecked("axone178mjppxcf3n9q3q7utdwrajdal0tsqvymz0900"), - expected: false, - }, - TC { - issuer: "did:key:z6MkvMXwgwJTJacfB", - addr: Addr::unchecked("axone1el0ln38j0qvkr22pwztelv3hxrsc0gx5zjsfky"), - expected: false, - }, - TC { - issuer: "did:key:zQ3shsoarhrw7SoyUyoCbwv8k2BRRLeqkjRkffGqx7WNpMQgw", - addr: Addr::unchecked("178mjppxcf3n9q3q7utdwrajdal0tsqvymz0900"), - expected: false, - }, - ]; - for tc in cases { - let owned_quads = testutil::read_test_quads("vc-eddsa-2020-ok-unsecured.nq"); - let dataset = Dataset::from(owned_quads.as_slice()); - - let mut vc_res = VerifiableCredential::try_from(&dataset).expect("vc from dataset"); - vc_res.issuer = tc.issuer; - - assert_eq!(vc_res.is_issued_by(&tc.addr), tc.expected); - } - } - - #[test] - fn vc_verify() { - let cases = 
vec![ - "vc-eddsa-2018-ok.nq", - "vc-eddsa-2020-ok.nq", - "vc-ecdsa-2019-ok.nq", - "vc-di-ed-ok.nq", - ]; - let mut deps = mock_dependencies(); - - for case in cases { - let owned_quads = testutil::read_test_quads(case); - let dataset = Dataset::from(owned_quads.as_slice()); - let vc = VerifiableCredential::try_from(&dataset).unwrap(); - let verif_res = vc.verify(&deps.as_mut()); - assert!(verif_res.is_ok()); - } - } -} diff --git a/contracts/axone-dataverse/src/error.rs b/contracts/axone-dataverse/src/error.rs deleted file mode 100644 index 9e11b46f..00000000 --- a/contracts/axone-dataverse/src/error.rs +++ /dev/null @@ -1,35 +0,0 @@ -use crate::credential::error::{InvalidCredentialError, VerificationError}; -use axone_rdf::serde::NQuadsReadError; -use cosmwasm_std::{Instantiate2AddressError, StdError}; -use cw_utils::PaymentError; -use thiserror::Error; - -#[derive(Debug, Error)] -pub enum ContractError { - #[error("{0}")] - Std(#[from] StdError), - - #[error("{0}")] - Instantiate2Address(#[from] Instantiate2AddressError), - - #[error("Couldn't parse RDF: '{0}'")] - ParseRDF(#[from] NQuadsReadError), - - #[error("Invalid credential: '{0}'")] - InvalidCredential(#[from] InvalidCredentialError), - - #[error("Credential verification failed: '{0}'")] - CredentialVerification(#[from] VerificationError), - - #[error("Credential not supported: '{0}'")] - UnsupportedCredential(String), - - #[error("Credential already exists: '{0}'")] - CredentialAlreadyExists(String), - - #[error("An unexpected error occurred: {0}")] - Unexpected(String), - - #[error("{0}")] - Payment(#[from] PaymentError), -} diff --git a/contracts/axone-dataverse/src/lib.rs b/contracts/axone-dataverse/src/lib.rs deleted file mode 100644 index 4e62335f..00000000 --- a/contracts/axone-dataverse/src/lib.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod contract; -mod credential; -mod error; -pub mod msg; -mod registrar; -pub mod state; -mod testutil; - -pub use crate::error::ContractError; diff --git 
a/contracts/axone-dataverse/src/msg.rs b/contracts/axone-dataverse/src/msg.rs deleted file mode 100644 index 9f8156ce..00000000 --- a/contracts/axone-dataverse/src/msg.rs +++ /dev/null @@ -1,197 +0,0 @@ -use cosmwasm_schema::{cw_serde, QueryResponses}; -use cosmwasm_std::{Addr, Binary, Uint128, Uint64}; - -/// `InstantiateMsg` is used to initialize a new instance of the dataverse. -#[cw_serde] -pub struct InstantiateMsg { - /// A unique name to identify the dataverse instance. - pub name: String, - - /// The configuration used to instantiate the triple store. - pub triplestore_config: TripleStoreConfig, -} - -/// `ExecuteMsg` defines the set of possible actions that can be performed on the dataverse. -/// -/// This enum provides variants for registering services, datasets, and other operations related to the dataverse. -#[cw_serde] -pub enum ExecuteMsg { - /// # SubmitClaims - /// Submits new claims about a resource to the dataverse. - /// - /// The SubmitClaims message is a pivotal component in the dataverse, enabling entities to contribute new claims about various - /// resources. A claim represents a statement made by an entity, referred to as the issuer, which could be a person, organization, - /// or service. These claims pertain to a diverse range of resources, including digital resources, services, zones, or individuals, - /// and are asserted as factual by the issuer. - /// - /// #### Format - /// - /// Claims are injected into the dataverse through Verifiable Credentials (VCs). - /// - /// Primarily, the claims leverage the AXONE ontology, which facilitates articulating assertions about widely acknowledged resources - /// in the dataverse, including digital services, digital resources, zones, governance, and more. - /// - /// Additionally, other schemas may also be employed to supplement and enhance the validated knowledge contributed to these resources. 
- /// - /// #### Preconditions - /// - /// To maintain integrity and coherence in the dataverse, several preconditions are set for the submission of claims: - /// - /// 1. **Format Requirement**: Claims must be encapsulated within Verifiable Credentials (VCs). - /// - /// 2. **Unique Identifier Mandate**: Each Verifiable Credential within the dataverse must possess a unique identifier. - /// - /// 3. **Issuer Verification**: Claims are accepted if they either: - /// - Bear a verifiable issuer's signature to ensure authenticity. - /// - Originate from the transaction sender, in which case the transaction signature serves as proof of authenticity. - /// - /// 4. **Content**: The actual implementation supports the submission of a single Verifiable Credential, containing a single claim. - /// - /// #### Supported cryptographic proofs - /// - /// - `Ed25519Signature2018` - /// - /// - `Ed25519Signature2020` - /// - /// - `EcdsaSecp256k1Signature2019` - /// - /// - `DataIntegrity` with the following cryptosuites: `eddsa-2022`, `eddsa-rdfc-2022`. - /// - SubmitClaims { - /// The Verifiable Credential containing the claims. - /// The claims must be serialized in the format specified by the `format` field. - claims: Binary, - /// RDF dataset serialization format for the claims. - /// If not provided, the default format is [N-Quads](https://www.w3.org/TR/n-quads/) format. - format: Option, - }, - - /// # RevokeClaims - /// Revoke or withdraw a previously submitted claims. - /// - /// #### Preconditions: - /// - /// 1. **Identifier Existence**: The identifier of the claims must exist in the dataverse. - RevokeClaims { - /// The unique identifier of the claims to be revoked. - identifier: Uri, - }, -} - -/// # TripleStoreConfig -/// `TripleStoreConfig` represents the configuration related to the management of the triple store. 
-#[cw_serde] -pub struct TripleStoreConfig { - /// The code id that will be used to instantiate the triple store contract in which - /// to store dataverse semantic data. It must implement the cognitarium interface. - pub code_id: Uint64, - - /// Limitations regarding triple store usage. - pub limits: TripleStoreLimitsInput, -} - -/// # TripleStoreLimitsInput -/// Contains requested limitations regarding store usages. -#[cw_serde] -#[derive(Default)] -pub struct TripleStoreLimitsInput { - /// The maximum number of triples the store can contain. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - pub max_triple_count: Option, - /// The maximum number of bytes the store can contain. - /// The size of a triple is counted as the sum of the size of its subject, predicate and object, - /// including the size of data types and language tags if any. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - pub max_byte_size: Option, - /// The maximum number of bytes the store can contain for a single triple. - /// The size of a triple is counted as the sum of the size of its subject, predicate and object, - /// including the size of data types and language tags if any. The limit is used to prevent - /// storing very large triples, especially literals. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - pub max_triple_byte_size: Option, - /// The maximum limit of a query, i.e. the maximum number of triples returned by a select query. - /// Default to 30 if not set. - pub max_query_limit: Option, - /// The maximum number of variables a query can select. - /// Default to 30 if not set. - pub max_query_variable_count: Option, - /// The maximum number of bytes an insert data query can contain. - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. 
- pub max_insert_data_byte_size: Option, - /// The maximum number of triples an insert data query can contain (after parsing). - /// Default to [Uint128::MAX] if not set, which can be considered as no limit. - pub max_insert_data_triple_count: Option, -} - -impl From for axone_cognitarium::msg::StoreLimitsInput { - fn from(value: TripleStoreLimitsInput) -> Self { - let mut limits = axone_cognitarium::msg::StoreLimitsInput::default(); - if let Some(max_triple_count) = value.max_triple_count { - limits.max_triple_count = max_triple_count; - } - if let Some(max_byte_size) = value.max_byte_size { - limits.max_byte_size = max_byte_size; - } - if let Some(max_triple_byte_size) = value.max_triple_byte_size { - limits.max_triple_byte_size = max_triple_byte_size; - } - if let Some(max_query_limit) = value.max_query_limit { - limits.max_query_limit = max_query_limit; - } - if let Some(max_query_variable_count) = value.max_query_variable_count { - limits.max_query_variable_count = max_query_variable_count; - } - if let Some(max_insert_data_byte_size) = value.max_insert_data_byte_size { - limits.max_insert_data_byte_size = max_insert_data_byte_size; - } - if let Some(max_insert_data_triple_count) = value.max_insert_data_triple_count { - limits.max_insert_data_triple_count = max_insert_data_triple_count; - } - - limits - } -} - -/// # RdfDatasetFormat -/// Represents the various serialization formats for an RDF dataset, i.e. a collection of RDF graphs -/// ([RDF Dataset](https://www.w3.org/TR/rdf11-concepts/#section-dataset)). -#[cw_serde] -#[derive(Default)] -pub enum RdfDatasetFormat { - /// # NQuads - /// N-Quads Format - /// - /// N-Quads is an extension of N-Triples to support RDF datasets by adding an optional fourth element to represent the graph name. - /// See the [official N-Quads specification](https://www.w3.org/TR/n-quads/). 
- #[serde(rename = "n_quads")] - #[default] - NQuads, -} - -/// # Uri -/// `Uri` represents a Uniform Resource Identifier (URI), a string of characters that provides a simple way -/// to identify a resource. -/// see https://en.wikipedia.org/wiki/Uniform_Resource_Identifier. -type Uri = String; - -/// `QueryMsg` defines the set of possible queries that can be made to retrieve information about the dataverse. -/// -/// This enum provides variants for querying the dataverse's details and other related information. -#[cw_serde] -#[derive(QueryResponses)] -pub enum QueryMsg { - /// # Dataverse - /// Retrieves information about the current dataverse instance. - #[returns(DataverseResponse)] - Dataverse {}, -} - -/// # DataverseResponse -/// DataverseResponse is the response of the Dataverse query. -#[cw_serde] -pub struct DataverseResponse { - /// The name of the dataverse. - pub name: String, - /// The cognitarium contract address. - pub triplestore_address: Addr, -} diff --git a/contracts/axone-dataverse/src/registrar/credential.rs b/contracts/axone-dataverse/src/registrar/credential.rs deleted file mode 100644 index 55e279ce..00000000 --- a/contracts/axone-dataverse/src/registrar/credential.rs +++ /dev/null @@ -1,146 +0,0 @@ -use crate::credential::rdf_marker::IRI_VC_TYPE; -use crate::credential::vc::{Claim, VerifiableCredential}; -use crate::ContractError; -use cosmwasm_std::{Addr, Env, MessageInfo}; -use itertools::Itertools; - -#[derive(Debug, PartialEq)] -pub struct DataverseCredential<'a> { - pub height: String, - pub timestamp: String, - pub tx_index: Option, - pub sender: Addr, - pub id: &'a str, - pub issuer: &'a str, - pub r#type: &'a str, - pub valid_from: &'a str, - pub valid_until: Option<&'a str>, - pub claim: &'a Claim<'a>, -} - -impl<'a> DataverseCredential<'a> { - fn extract_vc_type(vc: &'a VerifiableCredential<'a>) -> Result<&'a str, ContractError> { - vc.types - .iter() - .filter(|t| *t != &IRI_VC_TYPE) - .exactly_one() - .map_err(|_| { - 
ContractError::UnsupportedCredential( - "credential is expected to have exactly one type".to_string(), - ) - }) - .copied() - } - - fn extract_vc_claim(vc: &'a VerifiableCredential<'a>) -> Result<&'a Claim<'a>, ContractError> { - vc.claims.iter().exactly_one().map_err(|_| { - ContractError::UnsupportedCredential( - "credential is expected to contain exactly one claim".to_string(), - ) - }) - } -} - -impl<'a> TryFrom<(Env, MessageInfo, &'a VerifiableCredential<'a>)> for DataverseCredential<'a> { - type Error = ContractError; - - fn try_from( - (env, info, vc): (Env, MessageInfo, &'a VerifiableCredential<'a>), - ) -> Result { - Ok(DataverseCredential { - height: env.block.height.to_string(), - timestamp: env.block.time.seconds().to_string(), - tx_index: env.transaction.map(|tx| tx.index.to_string()), - sender: info.sender, - id: vc.id, - issuer: vc.issuer, - r#type: DataverseCredential::extract_vc_type(vc)?, - valid_from: vc.issuance_date, - valid_until: vc.expiration_date, - claim: DataverseCredential::extract_vc_claim(vc)?, - }) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::testutil::testutil; - use axone_rdf::dataset::Dataset; - use cosmwasm_std::testing::message_info; - use rio_api::model::{Literal, NamedNode, Quad}; - use testing::addr::{addr, SENDER}; - use testing::mock::mock_env_addr; - - #[test] - fn proper_from_verifiable() { - let owned_quads = testutil::read_test_quads("vc-valid.nq"); - let dataset = Dataset::from(owned_quads.as_slice()); - let vc = VerifiableCredential::try_from(&dataset).unwrap(); - let dc_res = - DataverseCredential::try_from((mock_env_addr(), message_info(&addr(SENDER), &[]), &vc)); - - assert!(dc_res.is_ok()); - assert_eq!(dc_res.unwrap(), DataverseCredential { - height: "12345".to_string(), - timestamp: "1571797419".to_string(), - tx_index: Some("3".to_string()), - sender: addr(SENDER), - id: "https://w3id.org/axone/ontology/vnext/schema/credential/digital-service/description/72cab400-5bd6-4eb4-8605-a5ee8c1a45c9", 
- issuer: "did:key:zQ3shs7auhJSmVJpiUbQWco6bxxEhSqWnVEPvaBHBRvBKw6Q3", - r#type: "https://w3id.org/axone/ontology/vnext/schema/credential/digital-service/description/DigitalServiceDescriptionCredential", - valid_from: "2024-01-22T00:00:00", - valid_until: Some("2025-01-22T00:00:00"), - claim: &Claim { - id: "did:key:zQ3shhb4SvzBRLbBonsvKb3WX6WoDeKWHpsXXXMhAJETqXAfB", - content: Dataset::new(vec![Quad { - subject: NamedNode {iri: "did:key:zQ3shhb4SvzBRLbBonsvKb3WX6WoDeKWHpsXXXMhAJETqXAfB"}.into(), - predicate: NamedNode {iri: "https://w3id.org/axone/ontology/vnext/schema/credential/digital-service/description/hasCategory"}.into(), - object: NamedNode{iri: "https://w3id.org/axone/ontology/vnext/thesaurus/digital-service-category/Storage"}.into(), - graph_name: None, - },Quad { - subject: NamedNode {iri: "did:key:zQ3shhb4SvzBRLbBonsvKb3WX6WoDeKWHpsXXXMhAJETqXAfB"}.into(), - predicate: NamedNode {iri: "https://w3id.org/axone/ontology/vnext/schema/credential/digital-service/description/hasTag"}.into(), - object: Literal::Simple {value: "Cloud"}.into(), - graph_name: None, - }]), - }, - }) - } - - #[test] - fn unsupported_from_verifiable() { - let cases = vec![ - ( - "vc-unsupported-1.nq", - "credential is expected to have exactly one type", - ), - ( - "vc-unsupported-2.nq", - "credential is expected to have exactly one type", - ), - ( - "vc-unsupported-3.nq", - "credential is expected to contain exactly one claim", - ), - ]; - - for case in cases { - let owned_quads = testutil::read_test_quads(case.0); - let dataset = Dataset::from(owned_quads.as_slice()); - let vc = VerifiableCredential::try_from(&dataset).unwrap(); - let dc_res = DataverseCredential::try_from(( - mock_env_addr(), - message_info(&addr(SENDER), &[]), - &vc, - )); - - assert!(dc_res.is_err()); - if let ContractError::UnsupportedCredential(msg) = dc_res.err().unwrap() { - assert_eq!(msg, case.1.to_string()); - } else { - assert!(false); - } - } - } -} diff --git 
a/contracts/axone-dataverse/src/registrar/mod.rs b/contracts/axone-dataverse/src/registrar/mod.rs deleted file mode 100644 index 767ae316..00000000 --- a/contracts/axone-dataverse/src/registrar/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod credential; -mod rdf; -pub mod registry; diff --git a/contracts/axone-dataverse/src/registrar/rdf.rs b/contracts/axone-dataverse/src/registrar/rdf.rs deleted file mode 100644 index 6f719491..00000000 --- a/contracts/axone-dataverse/src/registrar/rdf.rs +++ /dev/null @@ -1,371 +0,0 @@ -use crate::credential::rdf_marker::{RDF_DATE_TYPE, RDF_UNSIGNED_INT, RDF_UNSIGNED_LONG}; -use crate::registrar::credential::DataverseCredential; -use crate::ContractError; -use axone_rdf::dataset::QuadIterator; -use axone_rdf::normalize::IdentifierIssuer; -use axone_rdf::serde::{DataFormat, TripleWriter}; -use cosmwasm_std::{Binary, StdError}; -use rio_api::model::{BlankNode, Literal, NamedNode, Subject, Term, Triple}; - -pub const VC_RESERVED_PREDICATES: &[NamedNode<'_>] = &[ - VC_HEADER_HEIGHT, - VC_HEADER_TIMESTAMP, - VC_HEADER_TX, - VC_HEADER_SENDER, - VC_BODY_TYPE, - VC_BODY_ISSUER, - VC_BODY_VALID_FROM, - VC_BODY_VALID_UNTIL, - VC_BODY_SUBJECT, - VC_BODY_CLAIM, - VC_CLAIM_ORIGINAL_NODE, -]; - -pub const VC_HEADER_HEIGHT: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:header#height", -}; -pub const VC_HEADER_TIMESTAMP: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:header#timestamp", -}; -pub const VC_HEADER_TX: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:header#tx_index", -}; -pub const VC_HEADER_SENDER: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:header#sender", -}; -pub const VC_BODY_TYPE: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:body#type", -}; -pub const VC_BODY_ISSUER: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:body#issuer", -}; -pub const VC_BODY_VALID_FROM: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:body#validFrom", -}; -pub const 
VC_BODY_VALID_UNTIL: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:body#validUntil", -}; -pub const VC_BODY_SUBJECT: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:body#subject", -}; -pub const VC_BODY_CLAIM: NamedNode<'_> = NamedNode { - iri: "dataverse:credential:body#claim", -}; - -/// Used when a claim triple contains a named node as object to establish a hierarchy, we replace this hierarchical link -/// with a blank node, and this predicate is used to allow the reconciliation with the original named node. -pub const VC_CLAIM_ORIGINAL_NODE: NamedNode<'_> = NamedNode { - iri: "dataverse:claim#original-node", -}; - -impl<'a> DataverseCredential<'a> { - pub fn serialize(&self, format: DataFormat) -> Result { - if self.contains_reserved_predicates() { - Err(ContractError::UnsupportedCredential( - "Claim contains reserved predicates.".to_string(), - ))?; - } - - let claim_node = BlankNode { id: "c0" }; - // Used to rename all blank nodes to avoid conflict with the forged claim node `c0` - let mut blank_issuer = IdentifierIssuer::new("b", 0u128); - // Used to replace named node based hierarchy with blank nodes - let mut named_issuer = IdentifierIssuer::new("a", 0u128); - let triples: Vec> = - self.as_triples(claim_node, &mut named_issuer, &mut blank_issuer)?; - let out: Vec = Vec::default(); - let mut writer = TripleWriter::new(&format, out); - for triple in triples { - writer.write(&triple).map_err(|e| { - StdError::serialize_err("triple", format!("Error writing triple: {e}")) - })?; - } - - Ok(Binary::from(writer.finish().map_err(|e| { - StdError::serialize_err("triple", format!("Error writing triple: {e}")) - })?)) - } - - fn as_triples( - &'a self, - claim_node: BlankNode<'a>, - named_issuer: &'a mut IdentifierIssuer, - blank_issuer: &'a mut IdentifierIssuer, - ) -> Result>, ContractError> { - let c_subject = Subject::NamedNode(NamedNode { iri: self.id }); - - let mut triples = vec![ - Triple { - subject: c_subject, - predicate: 
VC_HEADER_HEIGHT, - object: Term::Literal(Literal::Typed { - value: &self.height, - datatype: RDF_UNSIGNED_LONG, - }), - }, - Triple { - subject: c_subject, - predicate: VC_HEADER_TIMESTAMP, - object: Term::Literal(Literal::Typed { - value: &self.timestamp, - datatype: RDF_UNSIGNED_LONG, - }), - }, - Triple { - subject: c_subject, - predicate: VC_HEADER_SENDER, - object: Term::Literal(Literal::Simple { - value: self.sender.as_str(), - }), - }, - Triple { - subject: c_subject, - predicate: VC_BODY_ISSUER, - object: Term::NamedNode(NamedNode { iri: self.issuer }), - }, - Triple { - subject: c_subject, - predicate: VC_BODY_TYPE, - object: Term::NamedNode(NamedNode { iri: self.r#type }), - }, - Triple { - subject: c_subject, - predicate: VC_BODY_VALID_FROM, - object: Term::Literal(Literal::Typed { - value: self.valid_from, - datatype: RDF_DATE_TYPE, - }), - }, - Triple { - subject: c_subject, - predicate: VC_BODY_SUBJECT, - object: Term::NamedNode(NamedNode { iri: self.claim.id }), - }, - ]; - - if let Some(tx_index) = &self.tx_index { - triples.push(Triple { - subject: c_subject, - predicate: VC_HEADER_TX, - object: Term::Literal(Literal::Typed { - value: tx_index, - datatype: RDF_UNSIGNED_INT, - }), - }); - } - - triples.extend(self.claim_as_triples(claim_node, named_issuer, blank_issuer)?); - - if let Some(valid_until) = self.valid_until { - triples.push(Triple { - subject: c_subject, - predicate: VC_BODY_VALID_UNTIL, - object: Term::Literal(Literal::Typed { - value: valid_until, - datatype: RDF_DATE_TYPE, - }), - }); - } - - Ok(triples) - } - - fn claim_as_triples( - &'a self, - claim_node: BlankNode<'a>, - named_issuer: &'a mut IdentifierIssuer, - blank_issuer: &'a mut IdentifierIssuer, - ) -> Result>, ContractError> { - // issue replacement identifiers for nodes - self.claim.content.iter().for_each(|q| { - match q.subject { - Subject::NamedNode(NamedNode { iri }) if iri != self.claim.id => { - named_issuer.get_or_issue(iri.to_string()); - } - 
Subject::BlankNode(BlankNode { id }) => { - blank_issuer.get_or_issue(id.to_string()); - } - _ => (), - }; - - if let Term::BlankNode(BlankNode { id }) = q.object { - blank_issuer.get_or_issue(id.to_string()); - } - }); - - let mut triples = self - .claim - .content - .iter() - .map(|q| { - let subject = match q.subject { - Subject::NamedNode(n) if n.iri == self.claim.id => { - Subject::BlankNode(claim_node) - } - Subject::NamedNode(n) if n.iri != self.claim.id => { - Subject::BlankNode(BlankNode { - id: named_issuer.get(n.iri).ok_or_else(|| { - ContractError::Unexpected( - "Could not replace named node, canonical identifier not found" - .to_string(), - ) - })?, - }) - } - Subject::BlankNode(BlankNode { id }) => Subject::BlankNode(BlankNode { - id: blank_issuer.get(id).ok_or_else(|| { - ContractError::Unexpected( - "Could not replace blank node, canonical identifier not found" - .to_string(), - ) - })?, - }), - _ => q.subject, - }; - let object = match q.object { - Term::NamedNode(n) => match named_issuer.get(n.iri) { - Some(id) => Term::BlankNode(BlankNode { id }), - None => Term::NamedNode(n), - }, - Term::BlankNode(BlankNode { id }) => Term::BlankNode(BlankNode { - id: blank_issuer.get(id).ok_or_else(|| { - ContractError::Unexpected( - "Could not replace blank node, canonical identifier not found" - .to_string(), - ) - })?, - }), - _ => q.object, - }; - - Ok(Triple { - subject, - predicate: q.predicate, - object, - }) - }) - .collect::>, ContractError>>()?; - - named_issuer - .issued_iter() - .for_each(|(original, (_, replacement))| { - triples.push(Triple { - subject: Subject::BlankNode(BlankNode { id: replacement }), - predicate: VC_CLAIM_ORIGINAL_NODE, - object: Term::NamedNode(NamedNode { iri: original }), - }); - }); - - triples.push(Triple { - subject: Subject::NamedNode(NamedNode { iri: self.id }), - predicate: VC_BODY_CLAIM, - object: Term::BlankNode(claim_node), - }); - - Ok(triples) - } - - fn contains_reserved_predicates(&self) -> bool { - self.claim 
- .content - .iter() - .predicates() - .any(|p| VC_RESERVED_PREDICATES.contains(&p)) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::credential::vc::VerifiableCredential; - use crate::testutil::testutil; - use axone_rdf::dataset::Dataset; - use cosmwasm_std::testing::message_info; - use testing::addr::{addr, SENDER}; - use testing::mock::mock_env_addr; - - #[test] - fn proper_serialization() { - let owned_quads = testutil::read_test_quads("vc-valid.nq"); - let dataset = Dataset::from(owned_quads.as_slice()); - let vc = VerifiableCredential::try_from(&dataset).unwrap(); - let dc = - DataverseCredential::try_from((mock_env_addr(), message_info(&addr(SENDER), &[]), &vc)) - .unwrap(); - - let expected = r#" "12345"^^ . - "1571797419"^^ . - "cosmwasm1pgm8hyk0pvphmlvfjc8wsvk4daluz5tgrw6pu5mfpemk74uxnx9qlm3aqg" . - . - . - "2024-01-22T00:00:00"^^ . - . - "3"^^ . -_:c0 . -_:c0 "Cloud" . - _:c0 . - "2025-01-22T00:00:00"^^ . -"#; - - let serialization_res = dc.serialize(DataFormat::NQuads); - assert!(serialization_res.is_ok()); - - assert_eq!( - String::from_utf8(serialization_res.unwrap().to_vec()).unwrap(), - expected - ); - } - - #[test] - fn proper_named_hierarchy_serialization() { - let owned_quads = testutil::read_test_quads("vc-claim-hierarchy.nq"); - let dataset = Dataset::from(owned_quads.as_slice()); - let vc = VerifiableCredential::try_from(&dataset).unwrap(); - let dc = - DataverseCredential::try_from((mock_env_addr(), message_info(&addr(SENDER), &[]), &vc)) - .unwrap(); - - let expected = r#" "12345"^^ . - "1571797419"^^ . - "cosmwasm1pgm8hyk0pvphmlvfjc8wsvk4daluz5tgrw6pu5mfpemk74uxnx9qlm3aqg" . - . - . - "2024-01-22T00:00:00"^^ . - . - "3"^^ . -_:c0 . -_:c0 "Cloud" . -_:c0 _:a0 . -_:a0 "nested value" . -_:a0 . - _:c0 . - "2025-01-22T00:00:00"^^ . 
-"#; - - let serialization_res = dc.serialize(DataFormat::NQuads); - assert!(serialization_res.is_ok()); - - assert_eq!( - String::from_utf8(serialization_res.unwrap().to_vec()).unwrap(), - expected - ); - } - - #[test] - fn serialize_reserved_predicates() { - let owned_quads = testutil::read_test_quads("vc-unsupported-4.nq"); - let dataset = Dataset::from(owned_quads.as_slice()); - let vc = VerifiableCredential::try_from(&dataset).unwrap(); - let dc = - DataverseCredential::try_from((mock_env_addr(), message_info(&addr(SENDER), &[]), &vc)) - .unwrap(); - - let res = dc.serialize(DataFormat::NQuads); - assert!(res.is_err()); - if let ContractError::UnsupportedCredential(msg) = res.err().unwrap() { - assert_eq!(msg, "Claim contains reserved predicates.".to_string()); - } else { - assert!(false); - } - } -} diff --git a/contracts/axone-dataverse/src/registrar/registry.rs b/contracts/axone-dataverse/src/registrar/registry.rs deleted file mode 100644 index 092c7eb5..00000000 --- a/contracts/axone-dataverse/src/registrar/registry.rs +++ /dev/null @@ -1,65 +0,0 @@ -use crate::registrar::credential::DataverseCredential; -use crate::state::DATAVERSE; -use crate::ContractError; -use axone_cognitarium::msg::DataFormat; -use axone_cognitarium::parser::{ - Node, SelectItem, SelectQuery, TriplePattern, VarOrNamedNode, VarOrNode, VarOrNodeOrLiteral, - WhereClause, IRI, -}; -use axone_cognitarium_client::CognitariumClient; -use cosmwasm_std::{ensure, DepsMut, StdResult, Storage, WasmMsg}; - -/// ClaimRegistrar is the entity responsible to manage claims (i.e. submission and revocation) into -/// the Dataverse, ensuring that any pre-condition criteria to an action is met, and any attached -/// logic is properly executed. 
-pub struct ClaimRegistrar { - triplestore: CognitariumClient, -} - -impl ClaimRegistrar { - const RDF_DATA_FORMAT: DataFormat = DataFormat::NTriples; - - pub fn try_new(storage: &dyn Storage) -> StdResult { - DATAVERSE.load(storage).map(|dataverse| Self { - triplestore: CognitariumClient::new(dataverse.triplestore_address), - }) - } - - /// Checks if a credential exists in the triplestore by ID. - /// Returns `true` if at least one triple is found, `false` otherwise. - pub fn exists(&self, deps: &DepsMut<'_>, credential_id: &str) -> Result { - let query = SelectQuery { - prefixes: Vec::new(), - limit: Some(1), - select: vec![SelectItem::Variable("p".into())], - r#where: WhereClause::Bgp { - patterns: vec![TriplePattern { - subject: VarOrNode::Node(Node::NamedNode(IRI::Full(credential_id.into()))), - predicate: VarOrNamedNode::Variable("p".into()), - object: VarOrNodeOrLiteral::Variable("o".into()), - }], - }, - }; - - let response = self.triplestore.select(deps.querier, query)?; - Ok(!response.results.bindings.is_empty()) - } - - pub fn submit_claim( - &self, - deps: &DepsMut<'_>, - credential: &DataverseCredential<'_>, - ) -> Result { - ensure!( - !self.exists(deps, credential.id)?, - ContractError::CredentialAlreadyExists(credential.id.to_string()) - ); - - self.triplestore - .insert_data( - Some(Self::RDF_DATA_FORMAT), - credential.serialize((&Self::RDF_DATA_FORMAT).into())?, - ) - .map_err(ContractError::from) - } -} diff --git a/contracts/axone-dataverse/src/state.rs b/contracts/axone-dataverse/src/state.rs deleted file mode 100644 index 5cb50541..00000000 --- a/contracts/axone-dataverse/src/state.rs +++ /dev/null @@ -1,11 +0,0 @@ -use cosmwasm_std::Addr; -use cw_storage_plus::Item; -use serde::{Deserialize, Serialize}; - -pub const DATAVERSE: Item = Item::new("dataverse"); - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Dataverse { - pub name: String, - pub triplestore_address: Addr, -} diff --git 
a/contracts/axone-dataverse/src/testutil.rs b/contracts/axone-dataverse/src/testutil.rs deleted file mode 100644 index 6369e1fc..00000000 --- a/contracts/axone-dataverse/src/testutil.rs +++ /dev/null @@ -1,31 +0,0 @@ -#[cfg(test)] -pub mod testutil { - use axone_rdf::owned_model::OwnedQuad; - use axone_rdf::serde::NQuadsReader; - use std::env; - use std::fs::File; - use std::io::{BufReader, Read}; - use std::path::Path; - - pub fn read_test_quads(file: &str) -> Vec { - let raw_rdf = read_test_data(file); - let buffer = BufReader::new(raw_rdf.as_slice()); - let mut reader = NQuadsReader::new(buffer); - reader.read_all().unwrap() - } - - pub fn read_test_data(file: &str) -> Vec { - let mut bytes: Vec = Vec::new(); - - File::open( - Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()) - .join("testdata") - .join(file), - ) - .unwrap() - .read_to_end(&mut bytes) - .unwrap(); - - bytes - } -} diff --git a/contracts/axone-dataverse/testdata/proof-ed255192020-ok.nq b/contracts/axone-dataverse/testdata/proof-ed255192020-ok.nq deleted file mode 100644 index 3cac71bf..00000000 --- a/contracts/axone-dataverse/testdata/proof-ed255192020-ok.nq +++ /dev/null @@ -1,5 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-ed255192020-options.nq b/contracts/axone-dataverse/testdata/proof-ed255192020-options.nq deleted file mode 100644 index 6a0130c7..00000000 --- a/contracts/axone-dataverse/testdata/proof-ed255192020-options.nq +++ /dev/null @@ -1,4 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ . -_:b1 . -_:b1 . -_:b1 . 
diff --git a/contracts/axone-dataverse/testdata/proof-invalid-pkey.nq b/contracts/axone-dataverse/testdata/proof-invalid-pkey.nq deleted file mode 100644 index 2a5bf7fd..00000000 --- a/contracts/axone-dataverse/testdata/proof-invalid-pkey.nq +++ /dev/null @@ -1,5 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-malformed-value.nq b/contracts/axone-dataverse/testdata/proof-malformed-value.nq deleted file mode 100644 index bcae5f95..00000000 --- a/contracts/axone-dataverse/testdata/proof-malformed-value.nq +++ /dev/null @@ -1,5 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-malformed.nq b/contracts/axone-dataverse/testdata/proof-malformed.nq deleted file mode 100644 index acc18ffd..00000000 --- a/contracts/axone-dataverse/testdata/proof-malformed.nq +++ /dev/null @@ -1,5 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 "malformed" _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-missing-created.nq b/contracts/axone-dataverse/testdata/proof-missing-created.nq deleted file mode 100644 index aecc8b15..00000000 --- a/contracts/axone-dataverse/testdata/proof-missing-created.nq +++ /dev/null @@ -1,4 +0,0 @@ -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . 
diff --git a/contracts/axone-dataverse/testdata/proof-missing-method.nq b/contracts/axone-dataverse/testdata/proof-missing-method.nq deleted file mode 100644 index cfad9d51..00000000 --- a/contracts/axone-dataverse/testdata/proof-missing-method.nq +++ /dev/null @@ -1,4 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-missing-purpose.nq b/contracts/axone-dataverse/testdata/proof-missing-purpose.nq deleted file mode 100644 index d2850853..00000000 --- a/contracts/axone-dataverse/testdata/proof-missing-purpose.nq +++ /dev/null @@ -1,4 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-missing-type.nq b/contracts/axone-dataverse/testdata/proof-missing-type.nq deleted file mode 100644 index 14a1cd0e..00000000 --- a/contracts/axone-dataverse/testdata/proof-missing-type.nq +++ /dev/null @@ -1,4 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/proof-missing-value.nq b/contracts/axone-dataverse/testdata/proof-missing-value.nq deleted file mode 100644 index e40227b6..00000000 --- a/contracts/axone-dataverse/testdata/proof-missing-value.nq +++ /dev/null @@ -1,4 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . 
diff --git a/contracts/axone-dataverse/testdata/proof-unsupported.nq b/contracts/axone-dataverse/testdata/proof-unsupported.nq deleted file mode 100644 index a3e34ea3..00000000 --- a/contracts/axone-dataverse/testdata/proof-unsupported.nq +++ /dev/null @@ -1,5 +0,0 @@ -_:b1 "2023-11-29T10:07:56Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z5UT4w3v6uSJ3srR3ZFSZBbgjaMRyEUaaGdnZzEb2oc1YTskkpff9qYt2GiTDuU2wqEh3f99YvWubPuqVNWrn9hNx"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-claim-hierarchy.nq b/contracts/axone-dataverse/testdata/vc-claim-hierarchy.nq deleted file mode 100644 index 5f849974..00000000 --- a/contracts/axone-dataverse/testdata/vc-claim-hierarchy.nq +++ /dev/null @@ -1,16 +0,0 @@ - . - "Cloud" . - . - "nested value" . - . - . - . - "2024-01-22T00:00:00"^^ . - "2025-01-22T00:00:00"^^ . - . - _:b0 . -_:b1 "2024-02-01T17:46:53.676947Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z3WboEDRwsWokH8vQrveVWbg6fQnqhHfhrkGHT9tyG2GYgzQVZ9zFW6eK2ZNcnGhydqXWDwwTsZq29e7cHJkbnVkF"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-di-ed-ok.nq b/contracts/axone-dataverse/testdata/vc-di-ed-ok.nq deleted file mode 100644 index ea572664..00000000 --- a/contracts/axone-dataverse/testdata/vc-di-ed-ok.nq +++ /dev/null @@ -1,11 +0,0 @@ - . - _:b0 . - . - "2023-05-01T06:09:10Z"^^ . - . -_:b1 "2024-02-07T13:24:37.636307Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 "eddsa-2022" _:b0 . -_:b1 _:b0 . -_:b1 "z4JEgstWKYH2UxQK7VA6tYumr9XpEYUr66FdW4BtbbMBfxo3khn3ueHTAEL6c7EL9FU1pecZ471PkuRNSwHtNx7dz"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-ecdsa-2019-ok.nq b/contracts/axone-dataverse/testdata/vc-ecdsa-2019-ok.nq deleted file mode 100644 index 96a010f5..00000000 --- a/contracts/axone-dataverse/testdata/vc-ecdsa-2019-ok.nq +++ /dev/null @@ -1,10 +0,0 @@ - . - _:b0 . - . - "2023-05-01T06:09:10Z"^^ . - . -_:b1 "2024-02-08T17:44:07.477489Z"^^ _:b0 . -_:b1 _:b0 . 
-_:b1 "eyJhbGciOiJFUzI1NksiLCJjcml0IjpbImI2NCJdLCJiNjQiOmZhbHNlfQ..Zhj537ApzBdQqfMKe2J9-I6CiNKzY6O2MZMrGwA2KZxMBSt81ExQJm2AKfX39OzeLn_gkP9Gmn_Bb_Yz2jFgtA" _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-eddsa-2018-ok.nq b/contracts/axone-dataverse/testdata/vc-eddsa-2018-ok.nq deleted file mode 100644 index ef3d8962..00000000 --- a/contracts/axone-dataverse/testdata/vc-eddsa-2018-ok.nq +++ /dev/null @@ -1,10 +0,0 @@ - . - _:b0 . - . - "2023-05-01T06:09:10Z"^^ . - . -_:b1 "2024-02-21T20:17:42.150598Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..5YzyqmQp1yPAea1WgKYzXOWTdYiDJO5iZs3bjxnSCzJYZS-ToIIqL4T47Ni7zZpc8S968vPKdCZcQzkoNnIIDw" _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok-unsecured-trusted.nq b/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok-unsecured-trusted.nq deleted file mode 100644 index 5b063e12..00000000 --- a/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok-unsecured-trusted.nq +++ /dev/null @@ -1,9 +0,0 @@ - _:b2 . - . - . - . - "2026-02-16T00:00:00Z"^^ . - "2024-02-16T00:00:00Z"^^ . - . -_:b2 "Bachelor of Science and Arts"^^ . -_:b2 . diff --git a/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok-unsecured.nq b/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok-unsecured.nq deleted file mode 100644 index dbe80a33..00000000 --- a/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok-unsecured.nq +++ /dev/null @@ -1,9 +0,0 @@ - _:b2 . - . - . - . - "2026-02-16T00:00:00Z"^^ . - "2024-02-16T00:00:00Z"^^ . - . -_:b2 "Bachelor of Science and Arts"^^ . -_:b2 . diff --git a/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok.nq b/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok.nq deleted file mode 100644 index 4c0a3964..00000000 --- a/contracts/axone-dataverse/testdata/vc-eddsa-2020-ok.nq +++ /dev/null @@ -1,15 +0,0 @@ - _:b2 . - . - . - _:b0 . - . - "2026-02-16T00:00:00Z"^^ . - "2024-02-16T00:00:00Z"^^ . - . 
-_:b1 "2024-02-16T17:35:56.668169Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "zUuTPsT5aKs53ciMY6qEj2dqZxK4XnLoZhX26amB9GMCMhfcTmLbtndcW5JS4gUqPkxGxsCmZCKuvkFnDgrGFrWD"^^ _:b0 . -_:b1 _:b0 . -_:b2 "Bachelor of Science and Arts"^^ . -_:b2 . diff --git a/contracts/axone-dataverse/testdata/vc-unsupported-1.nq b/contracts/axone-dataverse/testdata/vc-unsupported-1.nq deleted file mode 100644 index 6b709c96..00000000 --- a/contracts/axone-dataverse/testdata/vc-unsupported-1.nq +++ /dev/null @@ -1,14 +0,0 @@ - _:b2 . - . - _:b0 . - . - "2026-02-16T00:00:00Z"^^ . - "2024-02-16T00:00:00Z"^^ . - . -_:b1 "2024-02-17T13:32:14.814613Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z5vGstniEMfyk5riV1UQvFXhXzdcbZ1978JFGdn1H2wTdp6qvxqDuw5xg8M33hjdZTG6zGuCbAhgCqf7R2CkhVRp5"^^ _:b0 . -_:b1 _:b0 . -_:b2 "Bachelor of Science and Arts"^^ . -_:b2 . diff --git a/contracts/axone-dataverse/testdata/vc-unsupported-2.nq b/contracts/axone-dataverse/testdata/vc-unsupported-2.nq deleted file mode 100644 index 4eff78d8..00000000 --- a/contracts/axone-dataverse/testdata/vc-unsupported-2.nq +++ /dev/null @@ -1,15 +0,0 @@ - . - "Cloud" . - . - . - . - . - "2024-01-22T00:00:00"^^ . - "2025-01-22T00:00:00"^^ . - . - _:b0 . -_:b1 "2024-02-01T17:46:53.676947Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z3WboEDRwsWokH8vQrveVWbg6fQnqhHfhrkGHT9tyG2GYgzQVZ9zFW6eK2ZNcnGhydqXWDwwTsZq29e7cHJkbnVkF"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-unsupported-3.nq b/contracts/axone-dataverse/testdata/vc-unsupported-3.nq deleted file mode 100644 index 1cfb1731..00000000 --- a/contracts/axone-dataverse/testdata/vc-unsupported-3.nq +++ /dev/null @@ -1,16 +0,0 @@ - . - "Cloud" . - "Cloud" . - . - . - . - . - "2024-01-22T00:00:00"^^ . - "2025-01-22T00:00:00"^^ . - . - _:b0 . -_:b1 "2024-02-01T17:46:53.676947Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z3WboEDRwsWokH8vQrveVWbg6fQnqhHfhrkGHT9tyG2GYgzQVZ9zFW6eK2ZNcnGhydqXWDwwTsZq29e7cHJkbnVkF"^^ _:b0 . -_:b1 _:b0 . 
diff --git a/contracts/axone-dataverse/testdata/vc-unsupported-4.nq b/contracts/axone-dataverse/testdata/vc-unsupported-4.nq deleted file mode 100644 index 270513e3..00000000 --- a/contracts/axone-dataverse/testdata/vc-unsupported-4.nq +++ /dev/null @@ -1,15 +0,0 @@ - . - "Cloud" . - "this shall not be allowed" . - . - . - . - "2024-01-22T00:00:00"^^ . - "2025-01-22T00:00:00"^^ . - . - _:b0 . -_:b1 "2024-02-01T17:46:53.676947Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z3WboEDRwsWokH8vQrveVWbg6fQnqhHfhrkGHT9tyG2GYgzQVZ9zFW6eK2ZNcnGhydqXWDwwTsZq29e7cHJkbnVkF"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-dataverse/testdata/vc-valid.nq b/contracts/axone-dataverse/testdata/vc-valid.nq deleted file mode 100644 index a331b668..00000000 --- a/contracts/axone-dataverse/testdata/vc-valid.nq +++ /dev/null @@ -1,14 +0,0 @@ - . - "Cloud" . - . - . - . - "2024-01-22T00:00:00"^^ . - "2025-01-22T00:00:00"^^ . - . - _:b0 . -_:b1 "2024-02-01T17:46:53.676947Z"^^ _:b0 . -_:b1 _:b0 . -_:b1 _:b0 . -_:b1 "z3WboEDRwsWokH8vQrveVWbg6fQnqhHfhrkGHT9tyG2GYgzQVZ9zFW6eK2ZNcnGhydqXWDwwTsZq29e7cHJkbnVkF"^^ _:b0 . -_:b1 _:b0 . diff --git a/contracts/axone-law-stone/Cargo.toml b/contracts/axone-dummy/Cargo.toml similarity index 55% rename from contracts/axone-law-stone/Cargo.toml rename to contracts/axone-dummy/Cargo.toml index b0d4820b..9f77fce9 100644 --- a/contracts/axone-law-stone/Cargo.toml +++ b/contracts/axone-dummy/Cargo.toml @@ -1,11 +1,11 @@ [package] authors = { workspace = true } -description = "The Smart Contract providing Governance as a Service." +description = "Placeholder CosmWasm contract kept during refactor." 
edition = { workspace = true } homepage = { workspace = true } keywords = { workspace = true } license = { workspace = true } -name = "axone-law-stone" +name = "axone-dummy" repository = { workspace = true } rust-version = { workspace = true } version = { workspace = true } @@ -16,28 +16,13 @@ exclude = [ "hash.txt", ] -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [lib] crate-type = ["cdylib", "rlib"] [dependencies] -axone-logic-bindings.workspace = true -axone-objectarium.workspace = true -axone-objectarium-client.workspace = true -axone-wasm.workspace = true +axone-dummy-lib = { path = "../../packages/axone-dummy-lib", version = "8.0.0" } cosmwasm-schema.workspace = true cosmwasm-std.workspace = true -cw-storage-plus.workspace = true -cw-utils.workspace = true -cw2.workspace = true -itertools = "0.14.0" -serde.workspace = true -thiserror.workspace = true - -[dev-dependencies] -testing.workspace = true -url = "2.5.7" [features] # use library feature to disable all instantiate/execute/query exports diff --git a/contracts/axone-cognitarium/Makefile.toml b/contracts/axone-dummy/Makefile.toml similarity index 100% rename from contracts/axone-cognitarium/Makefile.toml rename to contracts/axone-dummy/Makefile.toml diff --git a/contracts/axone-dummy/README.md b/contracts/axone-dummy/README.md new file mode 100644 index 00000000..d549f94c --- /dev/null +++ b/contracts/axone-dummy/README.md @@ -0,0 +1,3 @@ +# Axone Dummy Contract + +This is a simple dummy contract kept as a placeholder during the refactoring of the AXONE protocol contracts. It does not implement any real functionality. 
diff --git a/contracts/axone-dataverse/src/bin/schema.rs b/contracts/axone-dummy/src/bin/schema.rs similarity index 71% rename from contracts/axone-dataverse/src/bin/schema.rs rename to contracts/axone-dummy/src/bin/schema.rs index 3d0fdadc..4d026d8d 100644 --- a/contracts/axone-dataverse/src/bin/schema.rs +++ b/contracts/axone-dummy/src/bin/schema.rs @@ -1,6 +1,6 @@ use cosmwasm_schema::write_api; -use axone_dataverse::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; +use axone_dummy::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; fn main() { write_api! { diff --git a/contracts/axone-dummy/src/contract.rs b/contracts/axone-dummy/src/contract.rs new file mode 100644 index 00000000..e42747e2 --- /dev/null +++ b/contracts/axone-dummy/src/contract.rs @@ -0,0 +1,38 @@ +#[cfg(not(feature = "library"))] +use cosmwasm_std::entry_point; +use cosmwasm_std::{to_json_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdResult}; + +use crate::msg::{BarResponse, ExecuteMsg, InstantiateMsg, QueryMsg}; + +#[cfg_attr(not(feature = "library"), entry_point)] +pub fn instantiate( + _deps: DepsMut<'_>, + _env: Env, + _info: MessageInfo, + _msg: InstantiateMsg, +) -> StdResult { + Ok(Response::new() + .add_attribute("contract", "axone-dummy") + .add_attribute("greeting", axone_dummy_lib::greeting())) +} + +#[cfg_attr(not(feature = "library"), entry_point)] +pub fn execute( + _deps: DepsMut<'_>, + _env: Env, + _info: MessageInfo, + msg: ExecuteMsg, +) -> StdResult { + match msg { + ExecuteMsg::Foo {} => Ok(Response::new() + .add_attribute("action", "foo") + .add_attribute("greeting", axone_dummy_lib::greeting())), + } +} + +#[cfg_attr(not(feature = "library"), entry_point)] +pub fn query(_deps: Deps<'_>, _env: Env, _msg: QueryMsg) -> StdResult { + to_json_binary(&BarResponse { + msg: axone_dummy_lib::greeting().to_string(), + }) +} diff --git a/contracts/axone-dummy/src/lib.rs b/contracts/axone-dummy/src/lib.rs new file mode 100644 index 00000000..7760f8d9 --- /dev/null +++ 
b/contracts/axone-dummy/src/lib.rs @@ -0,0 +1,4 @@ +pub mod contract; +pub mod msg; + +pub use crate::contract::{execute, instantiate, query}; diff --git a/contracts/axone-dummy/src/msg.rs b/contracts/axone-dummy/src/msg.rs new file mode 100644 index 00000000..faabd3d7 --- /dev/null +++ b/contracts/axone-dummy/src/msg.rs @@ -0,0 +1,28 @@ +use cosmwasm_schema::{cw_serde, QueryResponses}; + +/// Instantiate message +#[cw_serde] +pub struct InstantiateMsg {} + +/// Execute messages +#[cw_serde] +pub enum ExecuteMsg { + /// # Foo + Foo, +} + +/// Query messages +#[cw_serde] +#[derive(QueryResponses)] +pub enum QueryMsg { + /// # Bar + #[returns(BarResponse)] + Bar { msg: String }, +} + +/// # BarResponse +#[cw_serde] +pub struct BarResponse { + /// The message value + pub msg: String, +} diff --git a/contracts/axone-law-stone/Makefile.toml b/contracts/axone-law-stone/Makefile.toml deleted file mode 100644 index 944012e8..00000000 --- a/contracts/axone-law-stone/Makefile.toml +++ /dev/null @@ -1,12 +0,0 @@ -[tasks.generate_schema] -args = ["run", "--bin", "schema"] -command = "cargo" - -[tasks.schema] -dependencies = ["generate_schema"] -script = ''' -SCHEMA=$(find schema -type f -maxdepth 1 -name '*.json' -print0) -TITLE=$(jq -r .contract_name $SCHEMA) -jq --arg description "$(cat README.md)" '. + {description: $description}' $SCHEMA > $SCHEMA.tmp && mv $SCHEMA.tmp $SCHEMA -jq --arg title $TITLE '. + {title: $title}' $SCHEMA > $SCHEMA.tmp && mv $SCHEMA.tmp $SCHEMA -''' diff --git a/contracts/axone-law-stone/README.md b/contracts/axone-law-stone/README.md deleted file mode 100644 index 7112da08..00000000 --- a/contracts/axone-law-stone/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# Law Stone - -## Overview - -The `axone-law-stone` smart contract aims to provide GaaS (i.e. Governance as a Service) in any [Cosmos blockchains](https://cosmos.network/) using the [CosmWasm](https://cosmwasm.com/) framework and the [Logic](https://docs.axone.xyz/modules/next/logic) AXONE module. 
- -This contract is built around a Prolog program describing the law by rules and facts. The law stone is immutable, this means it can only be questioned, there are no update mechanisms. - -The `axone-law-stone` responsibility is to guarantee the availability of its rules in order to question them, but not to ensure the rules application. - -To ensure reliability over time, the associated Prolog program is stored and pinned in a `axone-objectarium` contract. Moreover, all the eventual loaded files must be stored in a `axone-objectarium` contract as well, allowing the contract to pin them. - -To be able to free the underlying resources (i.e. objects in `axone-objectarium`) if not used anymore, the contract admin can break the stone. - -➡️ Checkout the [examples](https://github.com/axone-protocol/contracts/tree/main/contracts/axone-law-stone/examples/) for usage information. diff --git a/contracts/axone-law-stone/examples/multiple-sources/README.md b/contracts/axone-law-stone/examples/multiple-sources/README.md deleted file mode 100644 index 6bddd3cd..00000000 --- a/contracts/axone-law-stone/examples/multiple-sources/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# Multiple source - -When executed by the logic module, a Prolog program can load other programs through the `consult(File).` predicate. This example aims to illustrate this case, when the law is composed of multiple Prolog sources. - -## The Program - -We'll reuse the exact same story as the [single-source](../single-source/README.md) example, we'll just split the program in two: - -- `template.pl`: Contains the governance rules predicates, which can be customized by defining configuration predicates; -- `gov.pl`: Load `template.pl` and define configuration predicates. - -⚠️ A special attention must be brought to the template loading with the `consult(File).` predicate. - -The Logic module expects the `File` variable to be an URI so it can resolve its content. 
Through the `cosmwasm` prefix it can loads data from any smart contract query, we'll configure the URI to perform a `axone-objectarium` `ObjectData` query in order to load the `template.pl`. - -The URI has the following form: - -```bash -cosmwasm:{contract_name}:{contract_address}?query={contract_query} -``` - -Where: - -- `{contract_name}`: Only informative, represents the corresponding smart contract name or type (e.g. `axone-objectarium`); -- `{contract_address}`: The smart contract to query, concerning the `axone-law-stone` it must be a `axone-objectarium` contract; -- `{contract_query}`: The JSON query to perform on the targeted smart contract, URL encoded. In our case an `ObjectData` query, for example: `%7B%22object_data%22%3A%7B%22id%22%3A%22b118d79b4a368028b34d564448e5f1082e098613434370f3c15d6a2bf9979dfc%22%7D%7D`; - -## Instantiate - -First the `template.pl` program must be stored on a `axone-objectarium` and the `gov.pl` updated with the right URI in the `consult(File).` predicate, the URI should be in the form: - -```bash -cosmwasm:axone-objectarium:${STORAGE_ADDRESS}?query=%7B%22object_data%22%3A%7B%22id%22%3A%22b118d79b4a368028b34d564448e5f1082e098613434370f3c15d6a2bf9979dfc%22%7D%7D -``` - -The instantiate will take as parameters the base64 encoded program and the address of a `axone-objectarium` contract, on which the program will be stored and pinned, the `template.pl` object will also be pinned to ensure all the needed resources stays available: - -```bash -axoned tx wasm instantiate $CODE_ID \ - --label "multiple-source" \ - --from $ADDR \ - --admin $ADMIN_ADDR \ - --gas 1000000 \ - "{\"program\":\"$(cat gov.pl | base64)\", \"storage_address\": \"$STORAGE_ADDR\"}" -``` - -You can retrieve the new `axone-law-stone` smart contract address in the `_contract_address` instantiate attribute of the transaction. 
- -## Query - -By using the `Ask` query we can provide Prolog predicates to be evaluated againsts the underlying programs: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"ask\": {\"query\": \"can('change_governance', 'did:example:axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu').\"}}" -``` - -## Break - -Only the smart contract admin can break the stone, if any. - -The program stored in the `axone-objectarium` smart contract will be removed, or at least un-pinned. And the `template.pl` object will be un pinned. - -By breaking the stone, you will not be able to query it anymore. - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - '"break_stone"' -``` diff --git a/contracts/axone-law-stone/examples/multiple-sources/gov.pl b/contracts/axone-law-stone/examples/multiple-sources/gov.pl deleted file mode 100644 index 969413c5..00000000 --- a/contracts/axone-law-stone/examples/multiple-sources/gov.pl +++ /dev/null @@ -1,11 +0,0 @@ -:- consult('cosmwasm:axone-objectarium:${STORAGE_ADDRESS}?query=%7B%22object_data%22%3A%7B%22id%22%3A%22b118d79b4a368028b34d564448e5f1082e098613434370f3c15d6a2bf9979dfc%22%7D%7D'). - -admin_addr('axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu'). - -allow_denom('uaxone'). -allow_did_method('example'). -allow_addr(Addr) :- bech32_address(-('axone', _), Addr). - -min_exec_workflow_amount(1000000). -min_create_dataset_amount(10000). -min_create_service_amount(100000). diff --git a/contracts/axone-law-stone/examples/multiple-sources/template.pl b/contracts/axone-law-stone/examples/multiple-sources/template.pl deleted file mode 100644 index 6becce5a..00000000 --- a/contracts/axone-law-stone/examples/multiple-sources/template.pl +++ /dev/null @@ -1,28 +0,0 @@ -valid_did(DID, Addr) :- - did_components(DID, did_components(Method, Addr, _, _, _)), - allow_did_method(Method), - allow_addr(Addr). - -min_amount(exec_workflow, MinAmount) :- - min_exec_workflow_amount(MinAmount). 
- -min_amount(create_dataset, MinAmount) :- - min_create_dataset_amount(MinAmount). - -min_amount(create_service, MinAmount) :- - min_create_service_amount(MinAmount). - -has_sufficient_balance(Addr, MinAmount) :- - bank_spendable_balances(Addr, Balances), - member(Denom-Amount, Balances), - allow_denom(Denom), - Amount @>= MinAmount. - -can(change_governance, DID) :- - valid_did(DID, Addr), - admin_addr(Addr). - -can(Action, DID) :- - valid_did(DID, Addr), - min_amount(Action, MinAmount), - has_sufficient_balance(Addr, MinAmount). diff --git a/contracts/axone-law-stone/examples/single-source/README.md b/contracts/axone-law-stone/examples/single-source/README.md deleted file mode 100644 index a74206e2..00000000 --- a/contracts/axone-law-stone/examples/single-source/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# Single source - -This example aims to illustrate the most simple case of the `axone-law-stone`: The law is composed of only one Prolog source program. - -## The Program - -The spirit here is to provide a `axone-law-stone` smart contract instance providing rules similar in form to Dataspace governance rules. - -You'll find in the [gov.pl](gov.pl) Prolog program some predicates defining the rules allowing to perform some typical Dataspaces actions. - -The `can(Action, DID)` predicate will allow or not an action for a `did` (i.e. Decentralized Identifier), a `did` being expected to have the form: `did:example:${AXONE_ADDRESS}`. 
We can describe the action rules as follows: - -- `change_governance`: Only the did admin can do it: `did:example:axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu`; -- `exec_workflow`: Only a valid DID having a minimum spendable of `1000000uaxone`; -- `create_dataset` Only a valid DID having a minimum spendable of `10000uaxone`; -- `create_service` Only a valid DID having a minimum spendable of `100000uaxone`; - -## Instantiate - -The instantiate will take as parameters the base64 encoded program and the address of a `axone-objectarium` contract, on which the program will be stored and pinned to prevent its removal and thus ensure its availability: - -```bash -axoned tx wasm instantiate $CODE_ID \ - --label "single-source" \ - --from $ADDR \ - --admin $ADMIN_ADDR \ - --gas 1000000 \ - "{\"program\":\"$(cat gov.pl | base64)\", \"storage_address\": \"$STORAGE_ADDR\"}" -``` - -You can retrieve the new `axone-law-stone` smart contract address in the `_contract_address` instantiate attribute of the transaction. - -## Query - -By using the `Ask` query we can provide Prolog predicates to be evaluated againsts the underlying program: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"ask\": {\"query\": \"can('change_governance', 'did:example:axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu').\"}}" -``` - -## Break - -Only the smart contract admin can break the stone, if any. - -The program stored in the `axone-objectarium` smart contract will be removed, or at least un-pinned. - -By breaking the stone, you will not be able to query it anymore. 
- -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - '"break_stone"' -``` diff --git a/contracts/axone-law-stone/examples/single-source/gov.pl b/contracts/axone-law-stone/examples/single-source/gov.pl deleted file mode 100644 index 791fa8f1..00000000 --- a/contracts/axone-law-stone/examples/single-source/gov.pl +++ /dev/null @@ -1,38 +0,0 @@ -admin_addr('axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu'). - -allow_denom('uaxone'). -allow_did_method('example'). -allow_addr(Addr) :- bech32_address(-('axone', _), Addr). - -valid_did(DID, Addr) :- - did_components(DID, did_components(Method, Addr, _, _, _)), - allow_did_method(Method), - allow_addr(Addr). - -min_exec_workflow_amount(1000000). -min_create_dataset_amount(10000). -min_create_service_amount(100000). - -min_amount(exec_workflow, MinAmount) :- - min_exec_workflow_amount(MinAmount). - -min_amount(create_dataset, MinAmount) :- - min_create_dataset_amount(MinAmount). - -min_amount(create_service, MinAmount) :- - min_create_service_amount(MinAmount). - -has_sufficient_balance(Addr, MinAmount) :- - bank_spendable_balances(Addr, Balances), - member(Denom-Amount, Balances), - allow_denom(Denom), - Amount @>= MinAmount. - -can(change_governance, DID) :- - valid_did(DID, Addr), - admin_addr(Addr). - -can(Action, DID) :- - valid_did(DID, Addr), - min_amount(Action, MinAmount), - has_sufficient_balance(Addr, MinAmount). diff --git a/contracts/axone-law-stone/src/bin/schema.rs b/contracts/axone-law-stone/src/bin/schema.rs deleted file mode 100644 index 6883628f..00000000 --- a/contracts/axone-law-stone/src/bin/schema.rs +++ /dev/null @@ -1,11 +0,0 @@ -use cosmwasm_schema::write_api; - -use axone_law_stone::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; - -fn main() { - write_api! 
{ - instantiate: InstantiateMsg, - execute: ExecuteMsg, - query: QueryMsg, - } -} diff --git a/contracts/axone-law-stone/src/contract.rs b/contracts/axone-law-stone/src/contract.rs deleted file mode 100644 index a6e67dc9..00000000 --- a/contracts/axone-law-stone/src/contract.rs +++ /dev/null @@ -1,1165 +0,0 @@ -#[cfg(not(feature = "library"))] -use cosmwasm_std::entry_point; -use cosmwasm_std::{ - to_json_binary, Binary, Deps, DepsMut, Env, MessageInfo, Reply, Response, StdResult, SubMsg, - WasmMsg, -}; -use cw2::set_contract_version; -use cw_utils::nonpayable; - -use axone_logic_bindings::LogicCustomQuery; -use axone_objectarium::msg::{ExecuteMsg as StorageMsg, QueryMsg as StorageQuery}; -use axone_objectarium_client::ObjectRef; - -use crate::error::ContractError; -use crate::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; - -// version info for migration info -const CONTRACT_NAME: &str = concat!("crates.io:", env!("CARGO_PKG_NAME")); -const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION"); - -const STORE_PROGRAM_REPLY_ID: u64 = 1; - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn instantiate( - deps: DepsMut<'_, LogicCustomQuery>, - _env: Env, - info: MessageInfo, - msg: InstantiateMsg, -) -> Result { - nonpayable(&info)?; - set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?; - - let store_msg = StorageMsg::StoreObject { - data: msg.program.clone(), - pin: true, - }; - - let store_program_msg = WasmMsg::Execute { - contract_addr: msg.storage_address.clone(), - msg: to_json_binary(&store_msg)?, - funds: vec![], - }; - - Ok(Response::new().add_submessage( - SubMsg::reply_on_success(store_program_msg, STORE_PROGRAM_REPLY_ID) - .with_payload(Binary::from(msg.storage_address.as_bytes())), - )) -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn execute( - deps: DepsMut<'_>, - env: Env, - info: MessageInfo, - msg: ExecuteMsg, -) -> Result { - nonpayable(&info)?; - match msg { - ExecuteMsg::BreakStone {} => 
execute::break_stone(deps, env, info), - } -} - -pub mod execute { - use crate::state::{DEPENDENCIES, PROGRAM}; - use axone_objectarium::msg::PinsForObjectResponse; - use cosmwasm_std::{ensure_eq, Order}; - - use super::*; - - pub fn break_stone( - deps: DepsMut<'_>, - env: Env, - info: MessageInfo, - ) -> Result { - ensure_eq!( - deps.querier - .query_wasm_contract_info(env.contract.address)? - .creator, - info.sender, - ContractError::Unauthorized - ); - - let resp = Response::new().add_attribute("action", "break_stone"); - - let mut stone = PROGRAM.load(deps.storage)?; - if stone.broken { - return Ok(resp); - } - stone.broken = true; - PROGRAM.save(deps.storage, &stone)?; - - let law_release_msg = match deps - .querier - .query_wasm_smart::( - stone.law.storage_address.clone(), - &StorageQuery::PinsForObject { - object_id: stone.law.object_id.clone(), - first: Some(1u32), - after: None, - }, - )? - .page_info - .has_next_page - { - true => stone.law.to_exec_unpin_msg(vec![]), - _ => stone.law.to_exec_forget_msg(vec![]), - }?; - - Ok(resp.add_message(law_release_msg).add_messages( - DEPENDENCIES - .range(deps.storage, None, None, Order::Ascending) - .map(|res: StdResult<(String, ObjectRef)>| { - res.and_then(|(_, obj)| obj.to_exec_unpin_msg(vec![])) - }) - .collect::>>()?, - )) - } -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn query(deps: Deps<'_, LogicCustomQuery>, env: Env, msg: QueryMsg) -> StdResult { - match msg { - QueryMsg::Ask { query } => to_json_binary(&query::ask(deps, env, query)?), - QueryMsg::Program {} => to_json_binary(&query::program(deps)?), - QueryMsg::ProgramCode {} => to_json_binary(&query::program_code(deps)?), - } -} - -pub mod query { - use cosmwasm_std::QueryRequest; - - use axone_logic_bindings::{as_prolog_atom, Answer, AskResponse}; - - use crate::helper::object_ref_to_uri; - use crate::msg::ProgramResponse; - use crate::state::PROGRAM; - - use super::*; - - const ERR_STONE_BROKEN: &str = 
"system_error(broken_law_stone)"; - - pub fn program(deps: Deps<'_, LogicCustomQuery>) -> StdResult { - let program = PROGRAM.load(deps.storage)?.into(); - Ok(program) - } - - pub fn program_code(deps: Deps<'_, LogicCustomQuery>) -> StdResult { - let ObjectRef { - storage_address, - object_id, - } = PROGRAM.load(deps.storage)?.law; - - deps.querier.query_wasm_smart::( - storage_address, - &StorageQuery::ObjectData { id: object_id }, - ) - } - - pub fn ask( - deps: Deps<'_, LogicCustomQuery>, - env: Env, - query: String, - ) -> StdResult { - let stone = PROGRAM.load(deps.storage)?; - if stone.broken { - return Ok(AskResponse { - height: env.block.height, - answer: Some(Answer::from_error(format!( - "error({},root)", - ERR_STONE_BROKEN - ))), - ..Default::default() - }); - } - - let req: QueryRequest = build_ask_query(stone.law, query)?.into(); - deps.querier.query(&req) - } - - pub fn build_ask_query(program: ObjectRef, query: String) -> StdResult { - let program_uri = object_ref_to_uri(program)?; - - Ok(LogicCustomQuery::Ask { - program: format!(":- consult({}).", as_prolog_atom(&program_uri.to_string())), - query, - }) - } -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn reply( - deps: DepsMut<'_, LogicCustomQuery>, - env: Env, - msg: Reply, -) -> Result { - match msg.id { - STORE_PROGRAM_REPLY_ID => reply::store_program_reply(deps, env, msg), - _ => Err(ContractError::UnknownReplyID), - } -} - -pub mod reply { - use crate::helper::{ask_response_to_objects, get_reply_event_attribute, object_ref_to_uri}; - use crate::state::{LawStone, DEPENDENCIES, PROGRAM}; - use axone_logic_bindings::as_prolog_atom; - use cw_utils::ParseReplyError; - - use super::*; - - pub fn store_program_reply( - deps: DepsMut<'_, LogicCustomQuery>, - _env: Env, - msg: Reply, - ) -> Result { - msg.result - .into_result() - .map_err(ParseReplyError::SubMsgFailure) - .map_err(Into::into) - .and_then(|e| { - get_reply_event_attribute(&e.events, "id").ok_or_else(|| { - 
ParseReplyError::SubMsgFailure( - "reply event doesn't contains object id".to_string(), - ) - .into() - }) - }) - .and_then(|obj_id| { - Ok(LawStone { - broken: false, - law: ObjectRef { - object_id: obj_id, - storage_address: String::from_utf8(msg.payload.to_vec()).map_err(|e| { - ParseReplyError::SubMsgFailure(format!( - "could not convert reply payload into string address: {}", - e - )) - })?, - }, - }) - }) - .and_then(|stone| -> Result, ContractError> { - PROGRAM - .save(deps.storage, &stone) - .map_err(ContractError::from)?; - - let req = build_source_files_query(stone.law.clone())?.into(); - let res = deps.querier.query(&req).map_err(ContractError::from)?; - - let objects = ask_response_to_objects(res, "Files".to_string())?; - objects - .into_iter() - .filter(|obj| obj.object_id != stone.law.object_id) - .map(|obj| { - DEPENDENCIES.save(deps.storage, obj.object_id.as_str(), &obj)?; - Ok(SubMsg::new(obj.to_exec_pin_msg(vec![])?)) - }) - .collect() - }) - .map(|msg| Response::new().add_submessages(msg)) - } - - pub fn build_source_files_query(program: ObjectRef) -> StdResult { - let program_uri = object_ref_to_uri(program)?.to_string(); - - Ok(LogicCustomQuery::Ask { - program: "source_files(Files) :- bagof(File, source_file(File), Files).".to_string(), - query: format!( - "consult({}), source_files(Files).", - as_prolog_atom(&program_uri) - ), - }) - } -} - -#[cfg(test)] -mod tests { - use std::collections::VecDeque; - use std::marker::PhantomData; - - use cosmwasm_std::testing::{ - message_info, mock_dependencies, mock_env, MockApi, MockQuerier, - MockQuerierCustomHandlerResult, MockStorage, - }; - use cosmwasm_std::{ - coins, from_json, to_json_binary, ContractInfoResponse, ContractResult, CosmosMsg, Event, - Order, OwnedDeps, SubMsgResponse, SubMsgResult, SystemError, SystemResult, WasmQuery, - }; - use cw_utils::ParseReplyError::SubMsgFailure; - use cw_utils::PaymentError; - use cw_utils::PaymentError::NonPayable; - - use 
axone_logic_bindings::testing::mock::mock_dependencies_with_logic_handler; - use axone_logic_bindings::{ - Answer, AskResponse, LogicCustomQuery, Result as LogicResult, Substitution, - }; - use axone_objectarium::msg::{PageInfo, PinsForObjectResponse}; - use axone_wasm::uri::CosmwasmUri; - use testing::addr::{addr, CREATOR, SENDER}; - - use crate::msg::ProgramResponse; - use crate::state::{LawStone, DEPENDENCIES, PROGRAM}; - - use super::*; - - fn custom_logic_handler_with_dependencies( - dependencies: Vec, - program: ObjectRef, - request: &LogicCustomQuery, - ) -> MockQuerierCustomHandlerResult { - let mut updated_deps = dependencies; - updated_deps.push(CosmwasmUri::try_from(program.clone()).unwrap().to_string()); - let deps_name = format!("[{}]", &updated_deps.join(",")); - let LogicCustomQuery::Ask { - program: exp_program, - query: exp_query, - .. - } = reply::build_source_files_query(program).unwrap(); - match request { - LogicCustomQuery::Ask { program, query } - if *query == exp_query && *program == exp_program => - { - SystemResult::Ok( - to_json_binary(&AskResponse { - height: 1, - gas_used: 1000, - answer: Some(Answer { - has_more: false, - variables: vec!["Files".to_string()], - results: vec![LogicResult { - error: None, - substitutions: vec![Substitution { - variable: "Files".to_string(), - expression: deps_name, - }], - }], - }), - user_output: None, - }) - .into(), - ) - } - _ => SystemResult::Err(SystemError::InvalidRequest { - error: "Ask `souces_files(Files).` predicate not called".to_string(), - request: Default::default(), - }), - } - } - - #[test] - fn proper_initialization() { - let mut deps = - mock_dependencies_with_logic_handler(|_| SystemResult::Err(SystemError::Unknown {})); - let program = to_json_binary("foo(_) :- true.").unwrap(); - - let msg = InstantiateMsg { - program: program.clone(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }; - let info = message_info(&addr(CREATOR), 
&[]); - - let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - - // Check if a message is sent to the axone-objectarium to store the logic program. - assert_eq!(1, res.messages.len()); - let sub_msg = res.messages.first().unwrap(); - assert_eq!(STORE_PROGRAM_REPLY_ID, sub_msg.id); - assert_eq!( - "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv", - String::from_utf8(sub_msg.payload.to_vec()).unwrap() - ); - - match &sub_msg.msg { - CosmosMsg::Wasm(wasm_msg) => match wasm_msg { - WasmMsg::Execute { msg, .. } => { - let result: StorageMsg = from_json(msg).unwrap(); - match result { - StorageMsg::StoreObject { data, pin } => { - assert_eq!(data, program); - assert!(pin, "the main program should be pinned"); - } - _ => panic!("storage message should be a StoreObject message"), - } - } - _ => panic!("wasm message should be a Storage message"), - }, - _ => panic!("cosmos sub message should be a Wasm message execute"), - } - } - - #[test] - fn funds_initialization() { - let mut deps = - mock_dependencies_with_logic_handler(|_| SystemResult::Err(SystemError::Unknown {})); - let env = mock_env(); - let info = message_info(&addr(SENDER), &coins(10, "uaxone")); - - let msg = InstantiateMsg { - program: to_json_binary("foo(_) :- true.").unwrap(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }; - - let result = instantiate(deps.as_mut(), env, info, msg); - assert!(result.is_err()); - assert_eq!(result.unwrap_err(), ContractError::Payment(NonPayable {})); - } - - #[test] - fn program() { - let mut deps = - mock_dependencies_with_logic_handler(|_| SystemResult::Err(SystemError::Unknown {})); - - let object_id = - "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05".to_string(); - let storage_addr = - "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv".to_string(); - PROGRAM - .save( - deps.as_mut().storage, - &LawStone { - broken: false, - law: ObjectRef { - 
object_id: object_id.clone(), - storage_address: storage_addr.clone(), - }, - }, - ) - .unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Program {}).unwrap(); - let result: ProgramResponse = from_json(&res).unwrap(); - - assert_eq!(object_id, result.object_id); - assert_eq!(storage_addr, result.storage_address); - } - - #[test] - fn program_code() { - const CONTRACT_ID: &str = - "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv"; - const OBJECT_ID: &str = "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05"; - const A_PROGRAM: &str = "foo(_) :- true."; - - let mut deps = - mock_dependencies_with_logic_handler(|_| SystemResult::Err(SystemError::Unknown {})); - deps.querier.update_wasm(move |query| match query { - WasmQuery::Smart { contract_addr, msg } if contract_addr == CONTRACT_ID => { - let data = to_json_binary(&A_PROGRAM).unwrap(); - let storage_query: StorageQuery = from_json(msg).unwrap(); - - assert!( - matches!(storage_query, StorageQuery::ObjectData { id } if id == OBJECT_ID) - ); - - SystemResult::Ok(ContractResult::Ok(to_json_binary(&data).unwrap())) - } - _ => { - panic!("UnsupportedRequest: query_wasm"); - } - }); - - PROGRAM - .save( - deps.as_mut().storage, - &LawStone { - broken: false, - law: ObjectRef { - object_id: OBJECT_ID.to_string(), - storage_address: CONTRACT_ID.to_string(), - }, - }, - ) - .unwrap(); - - let result = query(deps.as_ref(), mock_env(), QueryMsg::ProgramCode {}).unwrap(); - let data: Binary = from_json(&result).unwrap(); - let program: String = from_json(&data).unwrap(); - - assert_eq!(A_PROGRAM, program); - } - - fn custom_logic_handler_with_query( - env: &Env, - query: String, - program: ObjectRef, - request: &LogicCustomQuery, - ) -> MockQuerierCustomHandlerResult { - let LogicCustomQuery::Ask { - program: exp_program, - query: exp_query, - .. 
- } = query::build_ask_query(program, query.to_string()).unwrap(); - match request { - LogicCustomQuery::Ask { - program, - query: queryy, - } if *queryy == exp_query && *program == exp_program => SystemResult::Ok( - to_json_binary(&AskResponse { - height: env.block.height, - gas_used: 1000, - answer: Some(Answer { - has_more: false, - variables: vec!["Foo".to_string()], - results: vec![LogicResult { - error: None, - substitutions: vec![Substitution { - variable: "Foo".to_string(), - expression: "bar".to_string(), - }], - }], - }), - user_output: None, - }) - .into(), - ), - _ => SystemResult::Err(SystemError::InvalidRequest { - error: format!("Ask `{query}` predicate not called"), - request: Default::default(), - }), - } - } - - #[test] - fn ask() { - let cases = vec![ - ( - false, // broken - "test(Foo).".to_string(), // query - ObjectRef { - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05" - .to_string(), - storage_address: - "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }, - Some(AskResponse { - height: 12345, - gas_used: 1000, - answer: Some(Answer { - variables: vec!["Foo".to_string()], - results: vec![axone_logic_bindings::Result { - substitutions: vec![Substitution { - variable: "Foo".to_string(), - expression: "bar".to_string(), - }], - ..Default::default() - }], - ..Default::default() - }), - ..Default::default() - }), - None, // Expected error - ), - ( - true, // broken - "test(Foo).".to_string(), // query - ObjectRef { - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05" - .to_string(), - storage_address: - "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }, - Some(AskResponse { - height: 12345, - answer: Some(Answer { - results: vec![axone_logic_bindings::Result { - error: Some("error(system_error(broken_law_stone),root)".to_string()), - ..Default::default() - }], - ..Default::default() - }), - ..Default::default() - }), - 
None, // Expected error - ), - ]; - - for case in cases { - let p = Box::new(( - case.1.clone(), - case.2.object_id.to_string(), - case.2.storage_address.to_string(), - )); - let env = mock_env(); - let env_4_closure = env.clone(); - let mut deps = mock_dependencies_with_logic_handler(move |request| { - let (query, o, s) = p.as_ref(); - custom_logic_handler_with_query( - &env_4_closure, - query.to_string(), - ObjectRef { - object_id: o.to_string(), - storage_address: s.to_string(), - }, - request, - ) - }); - - PROGRAM - .save( - deps.as_mut().storage, - &LawStone { - broken: case.0, - law: case.2.clone(), - }, - ) - .unwrap(); - - let res = query(deps.as_ref(), env, QueryMsg::Ask { query: case.1 }); - - match res { - Ok(result) => { - let result: AskResponse = from_json(&result).unwrap(); - - assert!(case.3.is_some()); - assert!(result.answer.is_some()); - assert_eq!(result, case.3.unwrap()); - assert!(case.4.is_none(), "query doesn't return error") - } - Err(e) => { - assert!(case.4.is_some(), "query return error"); - assert_eq!(e, case.4.unwrap()) - } - } - } - } - - #[derive(Clone)] - struct StoreTestCase { - dependencies: Vec<(String, String, String)>, // URI, contract address, object id - object_id: String, - } - - #[test] - fn store_program_reply() { - let cases = vec![ - StoreTestCase { - dependencies: vec![ - ( - "cosmwasm:axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd?query=%7B%22object_data%22%3A%7B%22id%22%3A%20%224cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05%22%7D%7D".to_string(), - "axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd".to_string(), - "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05".to_string() - ), - ], - object_id: "0689c526187c6785dfcce28f8df19138da292598dc19548a852de1792062f271" - .to_string(), - }, - StoreTestCase { - dependencies: vec![], - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05" - .to_string(), - }, - StoreTestCase { - 
dependencies: vec![ - ( - "cosmwasm:axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd?query=%7B%22object_data%22%3A%7B%22id%22%3A%20%224cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05%22%7D%7D".to_string(), - "axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd".to_string(), // contract addr - "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05".to_string() // object id - ), - ( - "cosmwasm:axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd?query=%7B%22object_data%22%3A%7B%22id%22%3A%20%220689c526187c6785dfcce28f8df19138da292598dc19548a852de1792062f271%22%7D%7D".to_string(), - "axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd".to_string(), // contract addr - "0689c526187c6785dfcce28f8df19138da292598dc19548a852de1792062f271".to_string() // object id - ), - ], - object_id: "1cc6de7672c97db145a3940df2264140ea893c6688fa5ca55b73cb8b68e0574d" - .to_string(), - }, - ]; - - for case in cases { - let uris = Box::new( - case.dependencies - .clone() - .into_iter() - .map(|(uri, _, _)| uri) - .collect::>(), - ); - let program_object_id = case.clone().object_id; - let mut deps = mock_dependencies_with_logic_handler(move |request| { - custom_logic_handler_with_dependencies( - uris.to_vec(), - ObjectRef { - object_id: program_object_id.clone(), - storage_address: - "axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd" - .to_string(), - }, - request, - ) - }); - - #[allow(deprecated)] - let reply = Reply { - id: STORE_PROGRAM_REPLY_ID, - payload: Binary::from( - "axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd".as_bytes(), - ), - gas_used: 0, - result: SubMsgResult::Ok(SubMsgResponse { - events: vec![Event::new("e".to_string()) - .add_attribute("id".to_string(), case.clone().object_id)], - data: None, - msg_responses: vec![], - }), - }; - - let response = reply::store_program_reply(deps.as_mut(), mock_env(), reply); - let res = response.unwrap(); - - let program = 
PROGRAM.load(&deps.storage).unwrap(); - assert!(!program.broken); - assert_eq!(case.clone().object_id, program.law.object_id); - - let deps_len_requirement = case.clone().dependencies.len(); - - if deps_len_requirement > 0 { - assert_eq!( - deps_len_requirement, - DEPENDENCIES - .keys_raw(&deps.storage, None, None, Order::Ascending) - .count() - ); - for (_, contract_addr, object_id) in case.clone().dependencies { - let o = DEPENDENCIES.load(&deps.storage, object_id.as_str()); - assert!( - o.is_ok(), - "dependencies should contains each object id dependencies as key" - ); - let o = o.unwrap(); - assert_eq!( - o.object_id, object_id, - "dependencies should contains each object id dependencies as key" - ); - assert_eq!( - o.storage_address, contract_addr, - "dependencies should contains each object id dependencies as key" - ); - } - } - - assert_eq!( - deps_len_requirement, - res.messages.len(), - "response should contains any sub message as dependencies" - ); - - let objects_pinned: Vec = res - .messages - .into_iter() - .flat_map(|sub_msg| -> Option { - match &sub_msg.msg { - CosmosMsg::Wasm(wasm_msg) => match wasm_msg { - WasmMsg::Execute { msg, .. 
} => { - let result: StorageMsg = from_json(msg).unwrap(); - match result { - StorageMsg::PinObject { id } => Some(id), - _ => panic!("should contains only PinObject message(s)"), - } - } - _ => panic!("wasm message should be a Storage message"), - }, - _ => panic!("cosmos sub message should be a Wasm message execute"), - } - }) - .collect(); - - for object in objects_pinned { - assert!( - DEPENDENCIES.has(&deps.storage, object.as_str()), - "each dependencies should be pinned by a PinObject message" - ) - } - } - } - - #[test] - fn program_reply_errors() { - let object_id = "axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd"; - #[allow(deprecated)] - let cases = vec![ - ( - Reply { - id: 404, - payload: Binary::from("axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd".as_bytes()), - gas_used: 0, - result: SubMsgResult::Ok(SubMsgResponse { - events: vec![Event::new("e".to_string()) - .add_attribute("id".to_string(), object_id.to_string())], - data: None, - msg_responses: vec![], - }), - }, - Err(ContractError::UnknownReplyID), - ), - ( - Reply { - id: 1, - payload: Binary::from("axone1dclchlcttf2uektxyryg0c6yau63eml5q9uq03myg44ml8cxpxnqen9apd".as_bytes()), - gas_used: 0, - result: SubMsgResult::Ok(SubMsgResponse { - events: vec![Event::new("e".to_string())], - data: None, - msg_responses: vec![], - }), - }, - Err(ContractError::ParseReplyError(SubMsgFailure( - "reply event doesn't contains object id".to_string(), - ))), - ), - ( - Reply { - id: 1, - payload: Binary::from(vec![0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89]), - gas_used: 0, - result: SubMsgResult::Ok(SubMsgResponse { - events: vec![Event::new("e".to_string()).add_attribute("id".to_string(), "some_object_id".to_string())], - data: None, - msg_responses: vec![], - }), - }, - Err(ContractError::ParseReplyError(SubMsgFailure( - "could not convert reply payload into string address: invalid utf-8 sequence of 1 bytes from index 0".to_string(), - ))), - ), - ]; - - 
for case in cases { - let mut deps = OwnedDeps { - storage: MockStorage::default(), - api: MockApi::default(), - querier: MockQuerier::default(), - custom_query_type: PhantomData, - }; - - let response = reply(deps.as_mut(), mock_env(), case.0); - - assert_eq!(response, case.1); - } - } - - #[test] - fn build_source_files_query() { - let result = reply::build_source_files_query(ObjectRef { - object_id: "1cc6de7672c97db145a3940df2264140ea893c6688fa5ca55b73cb8b68e0574d" - .to_string(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }); - - match result { - Ok(LogicCustomQuery::Ask { program, query }) => { - assert_eq!( - program, - "source_files(Files) :- bagof(File, source_file(File), Files)." - ); - assert_eq!(query, "consult('cosmwasm:axone-objectarium:axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv?query=%7B%22object_data%22%3A%7B%22id%22%3A%221cc6de7672c97db145a3940df2264140ea893c6688fa5ca55b73cb8b68e0574d%22%7D%7D'), source_files(Files).") - } - _ => panic!("Expected Ok(LogicCustomQuery)."), - } - } - - #[test] - fn build_ask_query() { - let result = query::build_ask_query( - ObjectRef { - object_id: "1cc6de7672c97db145a3940df2264140ea893c6688fa5ca55b73cb8b68e0574d" - .to_string(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }, - "test(X).".to_string(), - ); - - match result { - Ok(LogicCustomQuery::Ask { program, query }) => { - assert_eq!(program, ":- consult('cosmwasm:axone-objectarium:axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv?query=%7B%22object_data%22%3A%7B%22id%22%3A%221cc6de7672c97db145a3940df2264140ea893c6688fa5ca55b73cb8b68e0574d%22%7D%7D')."); - assert_eq!(query, "test(X).") - } - _ => panic!("Expected Ok(LogicCustomQuery)."), - } - } - - #[test] - fn execute_fail_with_funds() { - let mut deps = mock_dependencies(); - let env = mock_env(); - let info = message_info(&addr(SENDER), &coins(10, 
"uaxone")); - - let result = execute( - deps.as_mut(), - env.clone(), - info.clone(), - ExecuteMsg::BreakStone {}, - ); - assert!(result.is_err()); - assert_eq!( - result.unwrap_err(), - ContractError::Payment(PaymentError::NonPayable {}) - ); - } - - #[test] - fn break_stone() { - let cases = vec![ - (2, vec![]), - (1, vec![]), - ( - 1, - vec![ObjectRef { - storage_address: "addr1".to_string(), - object_id: "object1".to_string(), - }], - ), - ( - 3, - vec![ - ObjectRef { - storage_address: "addr1".to_string(), - object_id: "object1".to_string(), - }, - ObjectRef { - storage_address: "addr2".to_string(), - object_id: "object2".to_string(), - }, - ], - ), - ]; - - for case in cases { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(move |req| match req { - WasmQuery::ContractInfo { .. } => SystemResult::Ok(ContractResult::Ok( - to_json_binary(&ContractInfoResponse::new( - 0, - addr(CREATOR), - None, - false, - None, - )) - .unwrap(), - )), - WasmQuery::Smart { contract_addr, msg } - if contract_addr == "axone-objectarium1" => - { - match from_json(msg) { - Ok(StorageQuery::PinsForObject { - object_id: id, - first: Some(1u32), - after: None, - }) if id == "program-id" => SystemResult::Ok(ContractResult::Ok( - to_json_binary(&PinsForObjectResponse { - data: vec!["creator".to_string()], - page_info: PageInfo { - has_next_page: case.0 > 1, - cursor: "".to_string(), - }, - }) - .unwrap(), - )), - _ => SystemResult::Err(SystemError::Unknown {}), - } - } - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - PROGRAM - .save( - &mut deps.storage, - &LawStone { - broken: false, - law: ObjectRef { - object_id: "program-id".to_string(), - storage_address: "axone-objectarium1".to_string(), - }, - }, - ) - .unwrap(); - for dep in case.1.clone() { - let mut id = dep.storage_address.to_owned(); - id.push_str(dep.object_id.as_str()); - DEPENDENCIES - .save(&mut deps.storage, id.as_str(), &dep.clone()) - .unwrap(); - } - - let info = 
message_info(&addr(CREATOR), &[]); - let res = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::BreakStone {}, - ) - .unwrap(); - - assert!(PROGRAM.load(&deps.storage).unwrap().broken); - - let mut sub_msgs: VecDeque = res.messages.into(); - match sub_msgs.pop_front() { - Some(SubMsg { - msg: cosmos_msg, .. - }) => match cosmos_msg { - CosmosMsg::Wasm(WasmMsg::Execute { - contract_addr, msg, .. - }) => { - assert_eq!(contract_addr, "axone-objectarium1".to_string()); - if case.0 > 1 { - match from_json(&msg) { - Ok(StorageMsg::UnpinObject { id }) => { - assert_eq!(id, "program-id".to_string()); - } - _ => panic!("storage message should be a UnpinObject message"), - } - } else { - match from_json(&msg) { - Ok(StorageMsg::ForgetObject { id }) => { - assert_eq!(id, "program-id".to_string()); - } - _ => panic!("storage message should be a ForgetObject message"), - } - } - } - _ => panic!("sub message should be a WasmMsg message"), - }, - _ => panic!("result should contains sub messages"), - } - - for dep in case.1 { - match sub_msgs.pop_front() { - Some(SubMsg { - msg: cosmos_msg, .. - }) => match cosmos_msg { - CosmosMsg::Wasm(WasmMsg::Execute { - contract_addr, msg, .. 
- }) => { - assert_eq!(contract_addr, dep.storage_address); - match from_json(&msg) { - Ok(StorageMsg::UnpinObject { id }) => { - assert_eq!(id, dep.object_id); - } - _ => panic!("storage message should be a UnpinObject message"), - } - } - _ => panic!("sub message should be a WasmMsg message"), - }, - _ => panic!("result should contains sub messages"), - } - } - } - } - - #[test] - fn break_stone_creator() { - let cases = vec![ - // creator, sender, broken, Error - (CREATOR, SENDER, false, Some(ContractError::Unauthorized)), - (CREATOR, SENDER, true, Some(ContractError::Unauthorized)), - (CREATOR, CREATOR, false, None), - (CREATOR, CREATOR, true, None), - ]; - - for case in cases { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(move |req| match req { - WasmQuery::ContractInfo { .. } => { - let contract_info = - ContractInfoResponse::new(0, addr(case.0), None, false, None); - - SystemResult::Ok(ContractResult::Ok(to_json_binary(&contract_info).unwrap())) - } - WasmQuery::Smart { .. } => SystemResult::Ok(ContractResult::Ok( - to_json_binary(&PinsForObjectResponse { - data: vec![case.1.to_string()], - page_info: PageInfo { - has_next_page: false, - cursor: "".to_string(), - }, - }) - .unwrap(), - )), - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - PROGRAM - .save( - &mut deps.storage, - &LawStone { - broken: case.2, - law: ObjectRef { - object_id: "id".to_string(), - storage_address: "addr".to_string(), - }, - }, - ) - .unwrap(); - - let res = execute( - deps.as_mut(), - mock_env(), - message_info(&addr(case.1), &[]), - ExecuteMsg::BreakStone {}, - ); - - match case.3 { - Some(err) => { - assert!(res.is_err()); - assert_eq!(res.err().unwrap(), err); - } - None => assert!(res.is_ok()), - }; - } - } - - #[test] - fn break_broken_stone() { - let mut deps = mock_dependencies(); - deps.querier.update_wasm(|req| match req { - WasmQuery::ContractInfo { .. 
} => SystemResult::Ok(ContractResult::Ok( - to_json_binary(&ContractInfoResponse::new( - 0, - addr(CREATOR), - None, - false, - None, - )) - .unwrap(), - )), - _ => SystemResult::Err(SystemError::Unknown {}), - }); - - PROGRAM - .save( - &mut deps.storage, - &LawStone { - broken: true, - law: ObjectRef { - object_id: "id".to_string(), - storage_address: "addr".to_string(), - }, - }, - ) - .unwrap(); - DEPENDENCIES - .save( - &mut deps.storage, - "id", - &ObjectRef { - object_id: "id2".to_string(), - storage_address: "addr2".to_string(), - }, - ) - .unwrap(); - - let res = execute( - deps.as_mut(), - mock_env(), - message_info(&addr(CREATOR), &[]), - ExecuteMsg::BreakStone {}, - ); - assert!(res.is_ok()); - assert_eq!(res.ok().unwrap().messages.len(), 0); - } -} diff --git a/contracts/axone-law-stone/src/error.rs b/contracts/axone-law-stone/src/error.rs deleted file mode 100644 index e251a800..00000000 --- a/contracts/axone-law-stone/src/error.rs +++ /dev/null @@ -1,44 +0,0 @@ -use axone_logic_bindings::error::TermParseError; -use axone_wasm::error::CosmwasmUriError; -use cosmwasm_std::StdError; -use cw_utils::{ParseReplyError, PaymentError}; -use thiserror::Error; - -#[derive(Debug, Error, PartialEq)] -pub enum ContractError { - #[error("{0}")] - Std(#[from] StdError), - - #[error("{0}")] - ParseReplyError(#[from] ParseReplyError), - - #[error("An unknown reply ID was received.")] - UnknownReplyID, - - #[error("Cannot parse cosmwasm uri: {0}")] - ParseCosmwasmUri(CosmwasmUriError), - - #[error("Cannot extract data from logic ask response: {0}")] - LogicAskResponse(LogicAskResponseError), - - #[error("Only the contract admin can perform this operation.")] - Unauthorized, - - #[error("{0}")] - Payment(#[from] PaymentError), -} - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum LogicAskResponseError { - #[error("Could not parse term: {0}")] - Parse(TermParseError), - - #[error("Substitution error: {0}")] - Substitution(String), - - #[error("Unexpected response: 
{0}")] - Unexpected(String), - - #[error("Invalid parsed term format.")] - UnexpectedTerm, -} diff --git a/contracts/axone-law-stone/src/helper.rs b/contracts/axone-law-stone/src/helper.rs deleted file mode 100644 index 5e349952..00000000 --- a/contracts/axone-law-stone/src/helper.rs +++ /dev/null @@ -1,268 +0,0 @@ -use crate::error::LogicAskResponseError; -use crate::ContractError; -use axone_logic_bindings::{AskResponse, TermValue}; -use axone_objectarium_client::ObjectRef; -use axone_wasm::error::CosmwasmUriError; -use axone_wasm::uri::CosmwasmUri; -use cosmwasm_std::{Event, StdError, StdResult}; -use itertools::Itertools; -use std::any::type_name; - -pub fn object_ref_to_uri(object: ObjectRef) -> StdResult { - CosmwasmUri::try_from(object).map_err(|e: CosmwasmUriError| { - StdError::parse_err(type_name::(), e.to_string()) - }) -} - -pub fn get_reply_event_attribute(events: &[Event], key: &str) -> Option { - events - .iter() - .flat_map(|e| e.attributes.iter()) - .find(|a| a.key == key) - .map(|a| a.value.clone()) -} - -fn term_as_vec(term: TermValue) -> Result, ContractError> { - match term { - TermValue::Array(values) => values - .iter() - .map(|v| -> Result { - match v { - TermValue::Value(str) => Ok(str.clone()), - _ => Err(ContractError::LogicAskResponse( - LogicAskResponseError::UnexpectedTerm, - )), - } - }) - .collect(), - _ => Err(ContractError::LogicAskResponse( - LogicAskResponseError::UnexpectedTerm, - )), - } -} - -/// Extract the substitution of a specified variable, assuming a single result containing a single substitution in the -/// response. The substitution is then parsed as an array of [CosmwasmUri], returning their [ObjectRef] representation. 
-pub fn ask_response_to_objects( - res: AskResponse, - variable: String, -) -> Result, ContractError> { - let result = res - .answer - .map(|a| a.results) - .unwrap_or_default() - .into_iter() - .exactly_one() - .map_err(|_| { - ContractError::LogicAskResponse(LogicAskResponseError::Unexpected( - "expected exactly one result".to_string(), - )) - })?; - - if let Some(e) = result.error { - return Err(ContractError::LogicAskResponse( - LogicAskResponseError::Substitution(e), - )); - } - - let substitution = result - .substitutions - .into_iter() - .filter(|s| s.variable == variable) - .exactly_one() - .map_err(|_| { - ContractError::LogicAskResponse(LogicAskResponseError::Unexpected( - "expected exactly one substitution".to_string(), - )) - })?; - - substitution - .parse_expression() - .map_err(|e| ContractError::LogicAskResponse(LogicAskResponseError::Parse(e))) - .and_then(term_as_vec)? - .into_iter() - .map(|raw| { - CosmwasmUri::try_from(raw) - .and_then(ObjectRef::try_from) - .map_err(ContractError::ParseCosmwasmUri) - }) - .collect() -} - -#[cfg(test)] -mod tests { - use super::*; - use axone_logic_bindings::error::TermParseError; - use axone_logic_bindings::{Answer, Substitution}; - - #[test] - fn logic_to_objects() { - let cases = vec![ - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "[]".to_string(), - }] - }], - Ok(vec![]) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "['cosmwasm:axone-objectarium:axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv?query=%7B%22object_data%22%3A%7B%22id%22%3A%224cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05%22%7D%7D']".to_string(), - }] - }], - Ok(vec![ObjectRef{ - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05".to_string(), - storage_address: 
"axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv".to_string(), - }]) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "['cosmwasm:axone-objectarium:axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv?query=%7B%22object_data%22%3A%7B%22id%22%3A%224cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05%22%7D%7D','cosmwasm:axone-objectarium:axone1cxmx7su8h5pvqca85cxdylz86uj9x9gu5xuqv34kw87q5x0hexdsr3g4x4?query=%7B%22object_data%22%3A%7B%22id%22%3A%221485133dd3ab4b1c4b8085e7265585f91ae3cca0996a39e0377a1059296f6aa7%22%7D%7D']".to_string(), - }] - }], - Ok(vec![ObjectRef{ - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05".to_string(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv".to_string(), - },ObjectRef{ - object_id: "1485133dd3ab4b1c4b8085e7265585f91ae3cca0996a39e0377a1059296f6aa7".to_string(), - storage_address: "axone1cxmx7su8h5pvqca85cxdylz86uj9x9gu5xuqv34kw87q5x0hexdsr3g4x4".to_string(), - }]) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "[,]".to_string(), - }] - }], - Err(ContractError::LogicAskResponse(LogicAskResponseError::Parse(TermParseError::EmptyValue))) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "(1,2)".to_string(), - }] - }], - Err(ContractError::LogicAskResponse(LogicAskResponseError::UnexpectedTerm)) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "[[]]".to_string(), - }] - }], - Err(ContractError::LogicAskResponse(LogicAskResponseError::UnexpectedTerm)) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: 
"X".to_string(), - expression: "[[]]".to_string(), - }] - }], - Err(ContractError::LogicAskResponse(LogicAskResponseError::UnexpectedTerm)) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "['nawak']".to_string(), - }] - }], - Err(ContractError::ParseCosmwasmUri(CosmwasmUriError::ParseURI(url::ParseError::RelativeUrlWithoutBase))) - ), - ( - vec![axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "['cosmwasm:addr?query=%7B%22object%22%3A%7B%22id%22%3A%221485133dd3ab4b1c4b8085e7265585f91ae3cca0996a39e0377a1059296f6aa7%22%7D%7D']".to_string(), - }] - }], - Err(ContractError::ParseCosmwasmUri(CosmwasmUriError::Malformed("wrong query content".to_string()))) - ), - ( - vec![ - axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "[]".to_string(), - }] - }, - axone_logic_bindings::Result { - error: None, - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "[]".to_string(), - }] - }, - ], - Err(ContractError::LogicAskResponse(LogicAskResponseError::Unexpected("expected exactly one result".to_string()))) - ), - ( - vec![ - axone_logic_bindings::Result { - error: None, - substitutions: vec![ - Substitution { - variable: "X".to_string(), - expression: "[]".to_string(), - }, - Substitution { - variable: "X".to_string(), - expression: "[]".to_string(), - }, - ] - }, - ], - Err(ContractError::LogicAskResponse(LogicAskResponseError::Unexpected("expected exactly one substitution".to_string()))) - ), - ( - vec![axone_logic_bindings::Result { - error: Some("error".to_string()), - substitutions: vec![Substitution { - variable: "X".to_string(), - expression: "[]".to_string(), - }] - }], - Err(ContractError::LogicAskResponse(LogicAskResponseError::Substitution("error".to_string()))) - ), - ]; - - for case in cases { - 
assert_eq!( - ask_response_to_objects( - AskResponse { - answer: Some(Answer { - results: case.0, - has_more: false, - variables: vec!["X".to_string()], - }), - height: 1, - gas_used: 1, - user_output: None, - }, - "X".to_string() - ), - case.1 - ); - } - } -} diff --git a/contracts/axone-law-stone/src/lib.rs b/contracts/axone-law-stone/src/lib.rs deleted file mode 100644 index 77b76a8b..00000000 --- a/contracts/axone-law-stone/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -pub mod contract; -mod error; -mod helper; -pub mod msg; -pub mod state; - -pub use crate::error::ContractError; diff --git a/contracts/axone-law-stone/src/msg.rs b/contracts/axone-law-stone/src/msg.rs deleted file mode 100644 index 92f9ea69..00000000 --- a/contracts/axone-law-stone/src/msg.rs +++ /dev/null @@ -1,69 +0,0 @@ -#[allow(unused_imports)] -use axone_logic_bindings::AskResponse; -use cosmwasm_schema::{cw_serde, QueryResponses}; -use cosmwasm_std::Binary; - -/// Instantiate message -#[cw_serde] -pub struct InstantiateMsg { - /// The Prolog program carrying law rules and facts. - pub program: Binary, - - /// The `axone-objectarium` contract address on which to store the law program. - pub storage_address: String, -} - -/// Execute messages -#[cw_serde] -pub enum ExecuteMsg { - /// # BreakStone - /// Break the stone making this contract unusable, by clearing all the related resources: - /// - Unpin all the pinned objects on `axone-objectarium` contracts, if any. - /// - Forget the main program (i.e. or at least unpin it). - /// - /// Only the creator address (the address that instantiated the contract) is authorized to invoke - /// this message. - /// If already broken, this is a no-op. - BreakStone {}, -} - -/// Query messages -#[cw_serde] -#[derive(QueryResponses)] -pub enum QueryMsg { - /// # Ask - /// Submits a Prolog query string to the `Logic` module, evaluating it against the - /// law program associated with this contract. 
- /// - /// If the law stone is broken the query returns a response with the error `error(system_error(broken_law_stone),root)` - /// set in the `answer` field. - #[returns(AskResponse)] - Ask { query: String }, - - /// # Program - /// Retrieves the location metadata of the law program bound to this contract. - /// - /// This includes the contract address of the `objectarium` and the program object ID, - /// where the law program's code can be accessed. - #[returns(ProgramResponse)] - Program {}, - - /// # ProgramCode - /// Fetches the raw code of the law program tied to this contract. - /// - /// If the law stone is broken, the query may fail if the program is no longer available in the - /// `Objectarium`. - #[returns(Binary)] - ProgramCode {}, -} - -/// # ProgramResponse -/// ProgramResponse carry elements to locate the program in a `axone-objectarium` contract. -#[cw_serde] -pub struct ProgramResponse { - /// The program object id in the `axone-objectarium` contract. - pub object_id: String, - - /// The `axone-objectarium` contract address on which the law program is stored. 
- pub storage_address: String, -} diff --git a/contracts/axone-law-stone/src/state.rs b/contracts/axone-law-stone/src/state.rs deleted file mode 100644 index 2e7656af..00000000 --- a/contracts/axone-law-stone/src/state.rs +++ /dev/null @@ -1,24 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::msg::ProgramResponse; -use axone_objectarium_client::ObjectRef; -use cw_storage_plus::{Item, Map}; - -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct LawStone { - pub broken: bool, - pub law: ObjectRef, -} - -impl From for ProgramResponse { - fn from(value: LawStone) -> ProgramResponse { - ProgramResponse { - object_id: value.law.object_id, - storage_address: value.law.storage_address, - } - } -} - -pub const PROGRAM: Item = Item::new("program"); - -pub const DEPENDENCIES: Map<&str, ObjectRef> = Map::new("dependencies"); diff --git a/contracts/axone-objectarium/Cargo.toml b/contracts/axone-objectarium/Cargo.toml deleted file mode 100644 index 302d0890..00000000 --- a/contracts/axone-objectarium/Cargo.toml +++ /dev/null @@ -1,53 +0,0 @@ -[package] -authors = { workspace = true } -description = "A Smart Contract which enables the storage of arbitrary unstructured Objects." -edition = { workspace = true } -homepage = { workspace = true } -keywords = { workspace = true } -license = { workspace = true } -name = "axone-objectarium" -repository = { workspace = true } -rust-version = { workspace = true } -version = { workspace = true } - -exclude = [ - # Those files are rust-optimizer artifacts. You might want to commit them for convenience but they should not be part of the source code publication. 
- "contract.wasm", - "hash.txt", -] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[lib] -crate-type = ["cdylib", "rlib"] - -[dependencies] -base16ct = { version = "0.2.0", features = ["alloc"] } -bin-it = "1.2.0" -bs58 = "0.5.1" -cosmwasm-schema.workspace = true -cosmwasm-std.workspace = true -cw-ownable = "2.1.0" -cw-storage-plus.workspace = true -cw-utils.workspace = true -cw2.workspace = true -derive_builder = "0.20.2" -enum-iterator = "2.1.0" -lzma-rs = "0.3.0" -md-5 = "0.10.6" -schemars.workspace = true -serde.workspace = true -sha2 = "0.10.9" -snap = "1" -thiserror.workspace = true - -[dev-dependencies] -base64 = "0.22.1" -testing.workspace = true - -[features] -# use library feature to disable all instantiate/execute/query exports -library = [] - -[package.metadata.cargo-machete] -ignored = ["md5"] diff --git a/contracts/axone-objectarium/Makefile.toml b/contracts/axone-objectarium/Makefile.toml deleted file mode 100644 index 944012e8..00000000 --- a/contracts/axone-objectarium/Makefile.toml +++ /dev/null @@ -1,12 +0,0 @@ -[tasks.generate_schema] -args = ["run", "--bin", "schema"] -command = "cargo" - -[tasks.schema] -dependencies = ["generate_schema"] -script = ''' -SCHEMA=$(find schema -type f -maxdepth 1 -name '*.json' -print0) -TITLE=$(jq -r .contract_name $SCHEMA) -jq --arg description "$(cat README.md)" '. + {description: $description}' $SCHEMA > $SCHEMA.tmp && mv $SCHEMA.tmp $SCHEMA -jq --arg title $TITLE '. 
+ {title: $title}' $SCHEMA > $SCHEMA.tmp && mv $SCHEMA.tmp $SCHEMA -''' diff --git a/contracts/axone-objectarium/README.md b/contracts/axone-objectarium/README.md deleted file mode 100644 index dc886195..00000000 --- a/contracts/axone-objectarium/README.md +++ /dev/null @@ -1,140 +0,0 @@ -# Objectarium - -A [CosmWasm](https://cosmwasm.com/) Smart Contract which enables the storage of arbitrary unstructured [Objects](https://en.wikipedia.org/wiki/Object_storage) in any [Cosmos](https://cosmos.network/) blockchains. - -## Purpose - -The smart contract serves as a robust storage solution, allowing for the storage of arbitrary `objects` on any blockchain within the [Cosmos blockchains](https://cosmos.network/) network, utilizing the [CosmWasm](https://cosmwasm.com/) framework. The key features of the contract include: - -**Versatile Data Storage:** -The contract is designed to accommodate any type of data, be it text, images, or complex data structures. This flexibility makes it an ideal choice for a wide range of decentralized applications (dApps) that require diverse storage needs. - -**On-chain Data:** -By design, the contract stores data on the blockchain, ensuring that it is immutable and publicly accessible. This is particularly useful for applications that require a high level of transparency, and also for any other smart contract that needs to store data on the blockchain. - -**Pinning and Unpinning:** -One unique feature is the ability to 'pin' and 'unpin' objects associated with a specific sender address. Pinning ensures that the object remains stored and accessible, while unpinning releases it from being permanently stored, offering a level of control over data persistence. - -**Object Removal:** -The contract also includes a 'forget' function, allowing for the removal of objects that are no longer pinned. This is particularly useful for managing storage costs and ensuring that only relevant data remains on the blockchain. 
- -**Cost Management:** -Features like pinning, unpinning, and discarding objects offer a strategic way to control storage costs. Additionally, setting limits on contract size — for instance in terms of object count and their individual sizes — serves as a practical tool to regulate storage costs. - -## Rationale - -In a sense, we can consider blockchains built on the [Cosmos L0](https://docs.cosmos.network/main) layer as decentralized databases, and their nature can be shaped and modeled through the smart contracts or modules. Given this, it provides a great opportunity to address the wide range of data management needs. One such important area is the management of unstructured, immutable data, which is written once but accessed frequently — commonly known as object storage. This is the primary focus of `axone-objectarium`: a specialized smart contract designed to offer a versatile and efficient approach to handling *on-chain*, *unstructured*, *immutable* data in a *decentralized* manner. - -## Terminology - -### Object - -In the context of the `axone-objectarium` smart contract, an `object` refers to a piece of data stored on the blockchain. It can represent various types of information, such as documents, binary files, or any other digital content. Objects are immutable once stored and are identified by their cryptographic hash, which can be generated using algorithms like MD5 or SHA256. This ensures the integrity and security of the stored data, as any modification to the object would result in a different hash value. - -### Bucket - -The smart contract is organized around buckets. A bucket represents a logical container within the `axone-objectarium` smart contract instance that groups related Objects together. It acts as a storage unit for Objects and provides a context for managing and organizing them. Each bucket has a unique name and is associated with a set of configurations and limits that define its behaviour and characteristics. 
- -### Pin - -Pin refers to a mechanism that allows users to mark or "pin" specific objects within a bucket. Pinning an object serves as a way to ensure that the object remains in storage and cannot be removed (this is called "forgotten"). It provides protection and guarantees that the pinned object will persist in the protocol. When an object is pinned, it is associated with the identity (or sender) that performed the pinning action. - -## Usage - -The unstructured nature of the data stored in the chain opens up a plethora of possibilities for decentralized applications that require this type of versatile storage. - -### In the AXONE protocol - -The primary function of this smart contract within the AXONE protocol is to enable the persistence of governance rules, which are encoded in Prolog. These programs are stored in an immutable format within the protocol and can be referenced by their unique identifiers in situations where there is a need to refer to these rules. - -### In the wild world - -A plethora of possibilities opens up for decentralized applications (dApps) that require this kind of versatile storage. However, it's important to consider the following constraints: the data is immutable, the cost of recording the data is proportional to its size, and the data is publicly accessible. 
- -## Play - -### Instantiation - -The `axone-objectarium` can be instantiated as follows, refer to the schema for more information on configuration, limits and pagination configuration: - -```bash -axoned tx wasm instantiate $CODE_ID \ - --label "my-storage" \ - --from $ADDR \ - --admin $ADMIN_ADDR \ - --gas 1000000 \ - '{"bucket":"my-bucket"}' -``` - -### Execution - -We can store an object by providing its data in base64 encoded, we can pin the stored object to prevent it from being removed: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"store_object\":{\"data\": \"$(cat my-data | base64)\",\"pin\":true}}" -``` - -The object id is stable as it is a hash, we can't store an object twice. - -With the following commands we can pin and unpin existing objects: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"pin_object\":{\"id\": \"$OBJECT_ID\"}}" - -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"unpin_object\":{\"id\": \"$OBJECT_ID\"}}" -``` - -And if an object is not pinned, or pinned by the sender of transaction, we can remove it: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"forget_object\":{\"id\": \"$OBJECT_ID\"}}" -``` - -### Querying - -Query an object by its id: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"object\": {\"id\": \"$OBJECT_ID\"}}" -``` - -Or its data: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"object_data\": {\"id\": \"$OBJECT_ID\"}}" -``` - -We can also list the objects, eventually filtering on the object owner: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"objects\": {\"address\": \"axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu\"}}" -``` - -And navigate in a cursor based pagination: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"objects\": {\"first\": 5, \"after\": 
\"23Y5t5DBe7DkPwfJo3Sd26Y8Z9epmtpA1FTpdG7DiG6MD8vPRTzzbQ9TccmyoBcePkPK6atUiqcAzJVo3TfYNBGY\"}}" -``` - -We can also query object pins with the same cursor based pagination: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"object_pins\": {\"id\": \"$OBJECT_ID\", \"first\": 5, \"after\": \"23Y5t5DBe7DkPwfJo3Sd26Y8Z9epmtpA1FTpdG7DiG6MD8vPRTzzbQ9TccmyoBcePkPK6atUiqcAzJVo3TfYNBGY\"}}" -``` diff --git a/contracts/axone-objectarium/src/bin/schema.rs b/contracts/axone-objectarium/src/bin/schema.rs deleted file mode 100644 index 5f042a16..00000000 --- a/contracts/axone-objectarium/src/bin/schema.rs +++ /dev/null @@ -1,11 +0,0 @@ -use cosmwasm_schema::write_api; - -use axone_objectarium::msg::{ExecuteMsg, InstantiateMsg, QueryMsg}; - -fn main() { - write_api! { - instantiate: InstantiateMsg, - execute: ExecuteMsg, - query: QueryMsg, - } -} diff --git a/contracts/axone-objectarium/src/compress.rs b/contracts/axone-objectarium/src/compress.rs deleted file mode 100644 index 4aa6f246..00000000 --- a/contracts/axone-objectarium/src/compress.rs +++ /dev/null @@ -1,150 +0,0 @@ -use std::io; - -use enum_iterator::Sequence; -use lzma_rs; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use snap; -use thiserror::Error; - -/// CompressionAlgorithm is an enumeration that defines the different compression algorithms -/// supported for compressing the content of objects. -#[derive( - Clone, Copy, Debug, Default, Deserialize, Eq, JsonSchema, PartialEq, Sequence, Serialize, -)] -pub enum CompressionAlgorithm { - /// Represents the "No compression" algorithm. - #[default] - Passthrough, - /// Represents the Snappy algorithm. - Snappy, - /// Represents the LZMA algorithm. - Lzma, -} - -impl CompressionAlgorithm { - /// compress returns the compressed data using the given algorithm. 
- pub fn compress(&self, data: &[u8]) -> Result, CompressionError> { - let compressor = match self { - CompressionAlgorithm::Passthrough => passthrough, - CompressionAlgorithm::Snappy => snappy_compress, - CompressionAlgorithm::Lzma => lzma_compress, - }; - compressor(data) - } - - /// decompress returns the decompressed data using the given algorithm. - /// The data must be compressed using the same algorithm. - pub fn decompress(&self, data: &[u8]) -> Result, CompressionError> { - let decompressor = match self { - CompressionAlgorithm::Passthrough => passthrough, - CompressionAlgorithm::Snappy => snappy_decompress, - CompressionAlgorithm::Lzma => lzma_decompress, - }; - decompressor(data) - } -} - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum CompressionError { - #[error("{0}")] - Error(String), -} - -impl From for CompressionError { - fn from(err: io::Error) -> Self { - CompressionError::Error(err.to_string()) - } -} - -impl From for CompressionError { - fn from(err: lzma_rs::error::Error) -> Self { - CompressionError::Error(err.to_string()) - } -} - -/// pass_through returns the data as is. -#[inline] -#[allow(clippy::unnecessary_wraps)] -fn passthrough(data: &[u8]) -> Result, CompressionError> { - Ok(data.to_vec()) -} - -// snappy_compress returns the Snappy compressed data. -#[inline] -fn snappy_compress(data: &[u8]) -> Result, CompressionError> { - let mut reader = io::Cursor::new(data); - let mut writer = Vec::new(); - { - let mut snappy_writer = snap::write::FrameEncoder::new(&mut writer); - io::copy(&mut reader, &mut snappy_writer)?; - } - Ok(writer) -} - -// snappy_decompress returns the Snappy decompressed data. -#[inline] -fn snappy_decompress(data: &[u8]) -> Result, CompressionError> { - let reader = io::Cursor::new(data); - let mut snappy_reader = snap::read::FrameDecoder::new(reader); - let mut writer = Vec::new(); - io::copy(&mut snappy_reader, &mut writer)?; - Ok(writer) -} - -// lzma_compress returns the LZMA compressed data. 
-#[inline] -fn lzma_compress(data: &[u8]) -> Result, CompressionError> { - let mut reader = io::Cursor::new(data); - let mut writer = Vec::new(); - lzma_rs::lzma_compress(&mut reader, &mut writer)?; - Ok(writer) -} - -// lzma_decompress returns the LZMA decompressed data. -#[inline] -fn lzma_decompress(data: &[u8]) -> Result, CompressionError> { - let mut reader = io::Cursor::new(data); - let mut writer = Vec::new(); - lzma_rs::lzma_decompress(&mut reader, &mut writer)?; - Ok(writer) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_from_io_decompress_error() { - let cases = vec![ - ( - std::io::Error::new( - std::io::ErrorKind::InvalidData, - "the expected decompressed size differs, actual 998, expected 1000", - ), - CompressionError::Error( - "the expected decompressed size differs, actual 998, expected 1000".to_string(), - ), - ), - ( - std::io::Error::new( - std::io::ErrorKind::InvalidData, - lzma_rs::error::Error::IoError(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "the expected decompressed size differs, actual 998, expected 1000", - )), - ), - CompressionError::Error( - "io error: the expected decompressed size differs, actual 998, expected 1000" - .to_string(), - ), - ), - ]; - - for (error, expected_error) in cases { - let compression_err = CompressionError::from(error); - - assert_eq!(compression_err, expected_error); - } - } -} diff --git a/contracts/axone-objectarium/src/contract.rs b/contracts/axone-objectarium/src/contract.rs deleted file mode 100644 index 6905a55e..00000000 --- a/contracts/axone-objectarium/src/contract.rs +++ /dev/null @@ -1,3141 +0,0 @@ -use crate::error::BucketError; -#[cfg(not(feature = "library"))] -use cosmwasm_std::entry_point; -use cosmwasm_std::{ - to_json_binary, Binary, Deps, DepsMut, Env, MessageInfo, Order, Response, StdResult, Storage, -}; -use cw2::set_contract_version; -use cw_utils::nonpayable; - -use crate::crypto; -use crate::crypto::Hash; -use crate::error::ContractError; -use 
crate::msg::{ExecuteMsg, InstantiateMsg, ObjectId, QueryMsg}; -use crate::state; -use crate::state::{pins, Bucket, Object, Pin, BUCKET, DATA, OBJECT}; - -// version info for migration info -const CONTRACT_NAME: &str = concat!("crates.io:", env!("CARGO_PKG_NAME")); -const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION"); - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn instantiate( - deps: DepsMut<'_>, - _env: Env, - info: MessageInfo, - msg: InstantiateMsg, -) -> Result { - nonpayable(&info)?; - cw_ownable::initialize_owner(deps.storage, deps.api, msg.owner.as_deref())?; - let bucket = Bucket::try_new( - msg.bucket, - msg.config.into(), - msg.limits.try_into()?, - msg.pagination.try_into()?, - )?; - - set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?; - BUCKET.save(deps.storage, &bucket)?; - - Ok(Response::default()) -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn execute( - deps: DepsMut<'_>, - env: Env, - info: MessageInfo, - msg: ExecuteMsg, -) -> Result { - nonpayable(&info)?; - - match msg { - ExecuteMsg::StoreObject { data, pin } => execute::store_object(deps, info, data, pin), - ExecuteMsg::PinObject { id } => execute::pin_object(deps, info, id), - ExecuteMsg::UnpinObject { id } => execute::unpin_object(deps, info, id), - ExecuteMsg::ForgetObject { id } => execute::forget_object(deps, info, id), - ExecuteMsg::UpdateOwnership(action) => { - cw_ownable::update_ownership(deps, &env.block, &info.sender, action)?; - Ok(Response::new()) - } - } -} - -pub mod execute { - use super::*; - use crate::crypto::Hash; - use crate::state::BucketLimits; - use crate::ContractError::ObjectPinned; - use cosmwasm_std::{Addr, Storage, Uint128}; - - pub fn store_object( - deps: DepsMut<'_>, - info: MessageInfo, - data: Binary, - pin: bool, - ) -> Result { - let size = (data.len() as u128).into(); - let bucket = BUCKET.load(deps.storage)?; - - // pre-conditions - if let Some(limit) = bucket.limits.max_object_size { - if size > 
limit { - return Err(BucketError::MaxObjectSizeLimitExceeded(size, limit).into()); - } - } - if let Some(limit) = bucket.limits.max_objects { - let value = bucket.stat.object_count + Uint128::one(); - if value > limit { - return Err(BucketError::MaxObjectsLimitExceeded(value, limit).into()); - } - } - if let Some(limit) = bucket.limits.max_object_pins { - if pin && limit.is_zero() { - return Err(BucketError::MaxObjectPinsLimitExceeded(Uint128::one(), limit).into()); - } - } - if let Some(limit) = bucket.limits.max_total_size { - let value = bucket.stat.size + size; - if value > limit { - return Err(BucketError::MaxTotalSizeLimitExceeded(value, limit).into()); - } - } - - // store object data - let id = crypto::hash(&bucket.config.hash_algorithm.into(), &data.to_vec()); - let mut res = Response::new() - .add_attribute("action", "store_object") - .add_attribute("id", id.to_string()); - - let exists = object_exists(deps.storage, &id); - let mut object = if !exists { - let compression = bucket.config.compression_algorithm; - let compressed_data = compression.compress(&data)?; - let compressed_size = Uint128::from(compressed_data.len() as u128); - - DATA.save(deps.storage, id.clone(), &compressed_data)?; - BUCKET.update(deps.storage, |mut bucket| -> Result<_, ContractError> { - let stat = &mut bucket.stat; - stat.size += size; - stat.object_count += Uint128::one(); - stat.compressed_size += compressed_size; - Ok(bucket) - })?; - - res = res - .add_attribute("size", size) - .add_attribute("compressed_size", compressed_size); - - Object { - id: id.clone(), - size, - pin_count: Uint128::zero(), - compression, - compressed_size, - } - } else { - OBJECT.load(deps.storage, id.clone())? - }; - - let pinned = if pin { - may_pin_object(deps.storage, info.sender, &mut object)? 
- } else { - false - }; - - if !exists || pinned { - OBJECT.save(deps.storage, id, &object)?; - } - - Ok(res.add_attribute("pinned", pinned.to_string())) - } - - pub fn pin_object( - deps: DepsMut<'_>, - info: MessageInfo, - object_id: ObjectId, - ) -> Result { - let id: Hash = object_id.clone().try_into()?; - let mut object = OBJECT.load(deps.storage, id.clone())?; - - let pinned = may_pin_object(deps.storage, info.sender, &mut object)?; - if pinned { - OBJECT.save(deps.storage, id, &object)?; - } - - Ok(Response::new() - .add_attribute("action", "pin_object") - .add_attribute("id", object_id) - .add_attribute("pinned", pinned.to_string())) - } - - pub fn unpin_object( - deps: DepsMut<'_>, - info: MessageInfo, - object_id: ObjectId, - ) -> Result { - let id: Hash = object_id.clone().try_into()?; - let mut object = OBJECT.load(deps.storage, id.clone())?; - - let unpinned = maybe_unpin_object(deps.storage, info.sender, &mut object)?; - if unpinned { - OBJECT.save(deps.storage, id, &object)?; - } - - Ok(Response::new() - .add_attribute("action", "unpin_object") - .add_attribute("id", object_id) - .add_attribute("unpinned", unpinned.to_string())) - } - - pub fn forget_object( - deps: DepsMut<'_>, - info: MessageInfo, - object_id: ObjectId, - ) -> Result { - let id: Hash = object_id.clone().try_into()?; - - let pinned_by_sender = (id.clone(), info.sender); - if pins().has(deps.storage, pinned_by_sender.clone()) { - pins().remove(deps.storage, pinned_by_sender)?; - } - - let still_pinned = is_pinned(deps.storage, &id); - if still_pinned { - return Err(ObjectPinned {}); - } - - let object = query::object(deps.as_ref(), object_id.clone())?; - BUCKET.update(deps.storage, |mut b| -> Result<_, ContractError> { - b.stat.object_count -= Uint128::one(); - b.stat.size -= object.size; - b.stat.compressed_size -= object.compressed_size; - Ok(b) - })?; - - OBJECT.remove(deps.storage, id.clone()); - DATA.remove(deps.storage, id); - - Ok(Response::new() - .add_attribute("action", 
"forget_object") - .add_attribute("id", object_id)) - } - - fn may_pin_object( - storage: &mut dyn Storage, - pinner: Addr, - target: &mut Object, - ) -> Result { - let key = (target.id.clone(), pinner.clone()); - if pins().has(storage, key) { - return Ok(false); - } - - target.pin_count += Uint128::one(); - - let bucket = BUCKET.load(storage)?; - - match bucket.limits { - BucketLimits { - max_object_pins: Some(max), - .. - } if max < target.pin_count => { - Err(BucketError::MaxObjectPinsLimitExceeded(target.pin_count, max).into()) - } - _ => { - pins().save( - storage, - (target.id.clone(), pinner.clone()), - &Pin { - id: target.id.clone(), - address: pinner, - }, - )?; - Ok(true) - } - } - } - - fn maybe_unpin_object( - storage: &mut dyn Storage, - pinner: Addr, - target: &mut Object, - ) -> Result { - let key = (target.id.clone(), pinner); - - if !pins().has(storage, key.clone()) { - return Ok(false); - } - - pins().remove(storage, key)?; - - if target.pin_count > Uint128::zero() { - target.pin_count -= Uint128::one(); - } - - Ok(true) - } -} - -#[cfg_attr(not(feature = "library"), entry_point)] -pub fn query(deps: Deps<'_>, _env: Env, msg: QueryMsg) -> StdResult { - match msg { - QueryMsg::Bucket {} => to_json_binary(&query::bucket(deps)?), - QueryMsg::Object { id } => to_json_binary(&query::object(deps, id)?), - QueryMsg::ObjectData { id } => to_json_binary(&query::data(deps, id)?), - QueryMsg::Objects { after, first } => to_json_binary(&query::objects(deps, after, first)?), - QueryMsg::ObjectsPinnedBy { - address, - first, - after, - } => to_json_binary(&query::objects_pinned_by(deps, address, after, first)?), - QueryMsg::PinsForObject { - object_id: id, - after, - first, - } => to_json_binary(&query::pins_for_object(deps, id, after, first)?), - QueryMsg::Ownership {} => to_json_binary(&cw_ownable::get_ownership(deps.storage)?), - } -} - -pub mod query { - use super::*; - use crate::crypto::Hash; - use crate::msg::{ - BucketResponse, Cursor, ObjectResponse, 
ObjectsResponse, PageInfo, PinsForObjectResponse, - }; - use crate::pagination::{PaginationHandler, QueryPage}; - use crate::state::PinPK; - use cosmwasm_std::{Order, StdError}; - - pub(crate) fn bucket(deps: Deps<'_>) -> StdResult { - let bucket = BUCKET.load(deps.storage)?; - - Ok(BucketResponse { - name: bucket.name, - config: bucket.config.into(), - limits: bucket.limits.into(), - pagination: bucket.pagination.into(), - stat: bucket.stat.into(), - }) - } - - pub(crate) fn object(deps: Deps<'_>, object_id: ObjectId) -> StdResult { - let id: Hash = object_id.try_into()?; - let object = OBJECT.load(deps.storage, id)?; - Ok((&object).into()) - } - - pub(crate) fn data(deps: Deps<'_>, object_id: ObjectId) -> StdResult { - let id: Hash = object_id.try_into()?; - let compression = OBJECT.load(deps.storage, id.clone())?.compression; - let data = DATA.load(deps.storage, id)?; - - compression - .decompress(&data) - .map_err(|e| StdError::serialize_err(format!("{:?}", compression), e)) - .map(Binary::from) - } - - pub(crate) fn objects( - deps: Deps<'_>, - after: Option, - first: Option, - ) -> StdResult { - let pagination = BUCKET.load(deps.storage)?.pagination; - let handler = PaginationHandler::<'_, Object, Hash>::from(pagination); - let (objects, page_info) = handler.query_page( - |min| OBJECT.range(deps.storage, min, None, Order::Ascending), - after, - first, - )?; - - let data = objects - .into_iter() - .map(|obj| ObjectResponse { - id: obj.id.to_string(), - is_pinned: is_pinned(deps.storage, &obj.id), - size: obj.size, - compressed_size: obj.compressed_size, - }) - .collect(); - - Ok(ObjectsResponse { data, page_info }) - } - - pub(crate) fn objects_pinned_by( - deps: Deps<'_>, - address: String, - after: Option, - first: Option, - ) -> StdResult { - let addr = deps.api.addr_validate(&address)?; - - let pagination = BUCKET.load(deps.storage)?.pagination; - let handler: PaginationHandler<'_, Pin, PinPK> = PaginationHandler::from(pagination); - - let (pins, 
page_info) = handler.query_page( - |min_bound| { - pins().idx.address.prefix(addr.clone()).range( - deps.storage, - min_bound, - None, - Order::Ascending, - ) - }, - after, - first, - )?; - - let mut data = Vec::with_capacity(pins.len()); - for pin in pins { - let obj = OBJECT.load(deps.storage, pin.id.clone())?; - - data.push(ObjectResponse { - id: obj.id.to_string(), - is_pinned: true, - size: obj.size, - compressed_size: obj.compressed_size, - }); - } - - Ok(ObjectsResponse { data, page_info }) - } - - pub(crate) fn pins_for_object( - deps: Deps<'_>, - object_id: ObjectId, - after: Option, - first: Option, - ) -> StdResult { - let id: Hash = object_id.try_into()?; - - require_object(deps.storage, &id)?; - - let pagination = BUCKET.load(deps.storage)?.pagination; - let handler: PaginationHandler<'_, Pin, PinPK> = PaginationHandler::from(pagination); - let (pins, page_info): (Vec, PageInfo) = handler.query_page( - |min_bound| { - pins().idx.object.prefix(id.clone()).range( - deps.storage, - min_bound, - None, - Order::Ascending, - ) - }, - after, - first, - )?; - - Ok(PinsForObjectResponse { - data: pins - .into_iter() - .map(|pin| pin.address.into_string()) - .collect(), - page_info, - }) - } -} - -#[inline] -fn is_pinned(storage: &dyn Storage, id: &Hash) -> bool { - pins() - .idx - .object - .prefix(id.clone()) - .keys_raw(storage, None, None, Order::Ascending) - .next() - .is_some() -} - -fn object_exists(storage: &dyn Storage, id: &Hash) -> bool { - OBJECT.has(storage, id.clone()) -} - -fn require_object(storage: &dyn Storage, id: &Hash) -> StdResult<()> { - OBJECT.load(storage, id.clone())?; - Ok(()) -} - -impl From for crypto::HashAlgorithm { - fn from(algorithm: state::HashAlgorithm) -> Self { - match algorithm { - state::HashAlgorithm::MD5 => crypto::HashAlgorithm::MD5, - state::HashAlgorithm::Sha224 => crypto::HashAlgorithm::Sha224, - state::HashAlgorithm::Sha256 => crypto::HashAlgorithm::Sha256, - state::HashAlgorithm::Sha384 => 
crypto::HashAlgorithm::Sha384, - state::HashAlgorithm::Sha512 => crypto::HashAlgorithm::Sha512, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::compress; - use crate::crypto::Hash; - use crate::error::BucketError; - use crate::msg::{ - BucketConfig, BucketConfigBuilder, BucketLimitsBuilder, BucketResponse, BucketStat, - BucketStatBuilder, CompressionAlgorithm, HashAlgorithm, ObjectResponse, ObjectsResponse, - PageInfo, PaginationConfigBuilder, PinsForObjectResponse, - }; - use base64::{engine::general_purpose, Engine as _}; - use cosmwasm_std::testing::{message_info, mock_dependencies, mock_env}; - use cosmwasm_std::StdError::NotFound; - use cosmwasm_std::{coins, from_json, Addr, Attribute, Order, StdError, Uint128}; - use cw_utils::PaymentError; - - use crate::msg::CompressionAlgorithm::{Passthrough, Snappy}; - use std::any::type_name; - use testing::addr::{addr, CREATOR, SENDER}; - use testing::mock::mock_env_addr; - - fn decode_hex(hex: &str) -> Vec { - base16ct::lower::decode_vec(hex).unwrap() - } - - fn with_namespace(key: &[u8]) -> Vec { - let namespace = decode_hex("00064f424a454354"); - let mut v = Vec::with_capacity(namespace.len() + key.len()); - v.extend(namespace); - v.extend_from_slice(key); - v - } - - fn not_found_object_info(hex: &str) -> String { - let type_name = type_name::(); - let key = with_namespace(&decode_hex(hex)); - format!("type: {type_name}; key: {:02X?}", key) - } - - #[test] - fn proper_initialization() { - let mut deps = mock_dependencies(); - - let msg = InstantiateMsg { - owner: None, - bucket: "foo".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - let info = message_info(&addr(CREATOR), &[]); - - let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - assert_eq!(0, res.messages.len()); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Bucket {}).unwrap(); - let value: BucketResponse = from_json(&res).unwrap(); - 
assert_eq!("foo", value.name); - assert_eq!(value.config, Default::default()); - assert_eq!(value.limits, Default::default()); - assert_eq!(value.pagination.max_page_size, 30); - assert_eq!(value.pagination.default_page_size, 10); - - // check internal state too - let bucket = BUCKET.load(&deps.storage).unwrap(); - assert_eq!(Uint128::zero(), bucket.stat.size); - assert_eq!(Uint128::zero(), bucket.stat.object_count); - } - - #[test] - fn proper_config_initialization() { - let mut deps = mock_dependencies(); - - // Define the test cases - let test_cases = vec![ - (HashAlgorithm::MD5, HashAlgorithm::MD5), - (HashAlgorithm::Sha224, HashAlgorithm::Sha224), - (HashAlgorithm::Sha256, HashAlgorithm::Sha256), - (HashAlgorithm::Sha384, HashAlgorithm::Sha384), - (HashAlgorithm::Sha512, HashAlgorithm::Sha512), - ]; - - for (hash_algorithm, expected_hash_algorithm) in test_cases { - let msg = InstantiateMsg { - owner: None, - bucket: "bar".to_string(), - config: BucketConfig { - hash_algorithm, - ..Default::default() - }, - limits: Default::default(), - pagination: Default::default(), - }; - let info = message_info(&addr(CREATOR), &[]); - - let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Bucket {}).unwrap(); - let value: BucketResponse = from_json(&res).unwrap(); - - assert_eq!("bar", value.name); - assert_eq!(value.config.hash_algorithm, expected_hash_algorithm); - } - } - - #[test] - fn proper_limits_initialization() { - let mut deps = mock_dependencies(); - - let msg = InstantiateMsg { - owner: None, - bucket: "bar".to_string(), - config: Default::default(), - limits: BucketLimitsBuilder::default() - .max_total_size(Uint128::new(20000)) - .max_objects(Uint128::new(10)) - .max_object_size(Uint128::new(2000)) - .max_object_pins(Uint128::new(1)) - .build() - .unwrap(), - pagination: PaginationConfigBuilder::default() - .max_page_size(50) - .default_page_size(30) - .build() - .unwrap(), - }; - let 
info = message_info(&addr(CREATOR), &[]); - let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Bucket {}).unwrap(); - let response: BucketResponse = from_json(&res).unwrap(); - assert_eq!(response.name, "bar"); - assert_eq!( - response.limits, - BucketLimitsBuilder::default() - .max_total_size(Uint128::new(20000)) - .max_objects(Uint128::new(10)) - .max_object_size(Uint128::new(2000)) - .max_object_pins(Uint128::new(1)) - .build() - .unwrap(), - ); - assert_eq!( - response.pagination, - PaginationConfigBuilder::default() - .max_page_size(50) - .default_page_size(30) - .build() - .unwrap(), - ); - assert_eq!(response.stat, BucketStat::default()); - } - - #[test] - fn proper_pagination_initialization() { - let mut deps = mock_dependencies(); - let msg = InstantiateMsg { - owner: None, - bucket: "bar".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: PaginationConfigBuilder::default() - .max_page_size(50) - .default_page_size(30) - .build() - .unwrap(), - }; - instantiate( - deps.as_mut(), - mock_env(), - message_info(&addr(CREATOR), &[]), - msg, - ) - .unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Bucket {}).unwrap(); - let value: BucketResponse = from_json(&res).unwrap(); - assert_eq!(value.pagination.max_page_size, 50); - assert_eq!(value.pagination.default_page_size, 30); - } - - #[test] - fn invalid_initialization() { - let cases = vec![ - ( - Default::default(), - Default::default(), - PaginationConfigBuilder::default() - .max_page_size(u32::MAX) - .build() - .unwrap(), - Some(StdError::generic_err( - "'max_page_size' cannot exceed 'u32::MAX - 1'", - )), - ), - ( - Default::default(), - Default::default(), - PaginationConfigBuilder::default() - .default_page_size(31) - .build() - .unwrap(), - Some(StdError::generic_err( - "'default_page_size' cannot exceed 'max_page_size'", - )), - ), - ( - Default::default(), - 
Default::default(), - PaginationConfigBuilder::default() - .default_page_size(701) - .max_page_size(700) - .build() - .unwrap(), - Some(StdError::generic_err( - "'default_page_size' cannot exceed 'max_page_size'", - )), - ), - ( - Default::default(), - Default::default(), - PaginationConfigBuilder::default() - .default_page_size(20) - .max_page_size(20) - .build() - .unwrap(), - None, - ), - ( - Default::default(), - Default::default(), - PaginationConfigBuilder::default() - .default_page_size(0) - .build() - .unwrap(), - Some(StdError::generic_err("'default_page_size' cannot be zero")), - ), - ( - Default::default(), - BucketLimitsBuilder::default() - .max_objects(0u128) - .build() - .unwrap(), - Default::default(), - Some(StdError::generic_err("'max_objects' cannot be zero")), - ), - ( - Default::default(), - BucketLimitsBuilder::default() - .max_object_size(0u128) - .build() - .unwrap(), - Default::default(), - Some(StdError::generic_err("'max_object_size' cannot be zero")), - ), - ( - Default::default(), - BucketLimitsBuilder::default() - .max_total_size(0u128) - .build() - .unwrap(), - Default::default(), - Some(StdError::generic_err("'max_total_size' cannot be zero")), - ), - ( - Default::default(), - BucketLimitsBuilder::default() - .max_total_size(10u128) - .max_object_size(20u128) - .build() - .unwrap(), - Default::default(), - Some(StdError::generic_err( - "'max_total_size' cannot be less than 'max_object_size'", - )), - ), - ( - Default::default(), - BucketLimitsBuilder::default() - .max_total_size(20u128) - .max_object_size(20u128) - .build() - .unwrap(), - Default::default(), - None, - ), - ( - BucketConfigBuilder::default() - .compression_algorithm(CompressionAlgorithm::Passthrough) - .build() - .unwrap(), - Default::default(), - Default::default(), - None, - ), - ( - Default::default(), - Default::default(), - Default::default(), - None, - ), - ]; - for case in cases { - let mut deps = mock_dependencies(); - let msg = InstantiateMsg { - owner: None, 
- bucket: "bar".to_string(), - config: case.0, - limits: case.1, - pagination: case.2, - }; - match instantiate( - deps.as_mut(), - mock_env(), - message_info(&addr(CREATOR), &[]), - msg, - ) { - Err(err) => { - assert!(case.3.is_some()); - assert_eq!(err, ContractError::Std(case.3.unwrap())) - } - _ => assert!(case.3.is_none()), - } - } - } - - #[test] - fn empty_name_initialization() { - let mut deps = mock_dependencies(); - - let msg = InstantiateMsg { - owner: None, - bucket: "".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - let info = message_info(&addr(CREATOR), &[]); - - let err = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap_err(); - - assert_eq!(err, ContractError::Bucket(BucketError::EmptyName)); - } - - #[test] - fn whitespace_initialization() { - let mut deps = mock_dependencies(); - - let msg = InstantiateMsg { - owner: None, - bucket: "foo bar".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - let info = message_info(&addr(CREATOR), &[]); - - let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - assert_eq!(0, res.messages.len()); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Bucket {}).unwrap(); - let value: BucketResponse = from_json(&res).unwrap(); - assert_eq!("foobar", value.name); - } - - #[test] - fn funds_initialization() { - let mut deps = mock_dependencies(); - let msg = InstantiateMsg { - owner: None, - bucket: "foo".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - let info = message_info(&addr(CREATOR), &coins(10, "uaxone")); - - let res = instantiate(deps.as_mut(), mock_env(), info, msg); - assert!(res.is_err()); - assert_eq!( - res.unwrap_err(), - ContractError::Payment(PaymentError::NonPayable {}) - ); - } - - #[test] - fn execute_fail_with_funds() { - let mut deps = mock_dependencies(); - let env = 
mock_env(); - let info = message_info(&addr(SENDER), &coins(10, "uaxone")); - - let messages = vec![ - ExecuteMsg::StoreObject { - data: Binary::from("data".as_bytes()), - pin: false, - }, - ExecuteMsg::PinObject { - id: "object_id".to_string(), - }, - ExecuteMsg::UnpinObject { - id: "object_id".to_string(), - }, - ExecuteMsg::ForgetObject { - id: "object_id".to_string(), - }, - ]; - - for msg in messages { - let result = execute(deps.as_mut(), env.clone(), info.clone(), msg); - assert!(result.is_err()); - assert_eq!( - result.unwrap_err(), - ContractError::Payment(PaymentError::NonPayable {}) - ); - } - } - - #[test] - fn store_object_without_limits() { - let obj1_content = &general_purpose::STANDARD.encode("hello"); - let obj2_content = &general_purpose::STANDARD.encode("okp4"); - - let test_cases = vec![ - ( - HashAlgorithm::MD5, - vec![ - ( - obj1_content, - true, - "5d41402abc4b2a76b9719d911017c592" - .to_string(), - 5, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "5d41402abc4b2a76b9719d911017c592" - .to_string()), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "true"), - ] - ), - ( - obj2_content, - false, - "33f41d49353ad1a876e36918f64eac4d" - .to_string(), - 4, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "33f41d49353ad1a876e36918f64eac4d" - .to_string()), - Attribute::new("size", "4"), - Attribute::new("compressed_size", "4"), - Attribute::new("pinned", "false"), - ] - ), - ], - ), - ( - HashAlgorithm::Sha224, - vec![ - ( - obj1_content, - true, - "ea09ae9cc6768c50fcee903ed054556e5bfc8347907f12598aa24193" - .to_string(), - 5, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "ea09ae9cc6768c50fcee903ed054556e5bfc8347907f12598aa24193" - .to_string()), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "true"), - ] - ), - ( - obj2_content, - false, - 
"fe798aa30e560c57d69c46982b2bb1320dc86813730bb7c6406ce84b" - .to_string(), - 4, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "fe798aa30e560c57d69c46982b2bb1320dc86813730bb7c6406ce84b" - .to_string()), - Attribute::new("size", "4"), - Attribute::new("compressed_size", "4"), - Attribute::new("pinned", "false"), - ] - ), - ], - ), - ( - HashAlgorithm::Sha256, - vec![ - ( - obj1_content, - true, - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" - .to_string(), - 5, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" - .to_string()), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "true"), - ] - ), - ( - obj2_content, - false, - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6" - .to_string(), - 4, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6" - .to_string()), - Attribute::new("size", "4"), - Attribute::new("compressed_size", "4"), - Attribute::new("pinned", "false"), - ] - ), - ], - ), - ( - HashAlgorithm::Sha384, - vec![ - ( - obj1_content, - true, - "59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f" - .to_string(), - 5, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f" - .to_string()), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "true"), - ] - ), - ( - obj2_content, - false, - "e700b122a81f64ce34ab67c6a815987536a05b0590bbeb32cf5e88963edd8c6e69c9e43b0f957f242d984f09f91bcaf2" - .to_string(), - 4, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", 
"e700b122a81f64ce34ab67c6a815987536a05b0590bbeb32cf5e88963edd8c6e69c9e43b0f957f242d984f09f91bcaf2" - .to_string()), - Attribute::new("size", "4"), - Attribute::new("compressed_size", "4"), - Attribute::new("pinned", "false"), - ] - ), - ], - ), - ( - HashAlgorithm::Sha512, - vec![ - ( - obj1_content, - true, - "9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043" - .to_string(), - 5, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043" - .to_string()), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "true"), - ] - ), - ( - obj2_content, - false, - "e4f4025e1e28abb473c89bcae03ded972e91b4427e8970be87f645cc34b9b203d633c12760e32c97011439640cba159f60992e10aac8023fa2577cadc1be3b55" - .to_string(), - 4, - vec![ - Attribute::new("action", "store_object"), - Attribute::new("id", "e4f4025e1e28abb473c89bcae03ded972e91b4427e8970be87f645cc34b9b203d633c12760e32c97011439640cba159f60992e10aac8023fa2577cadc1be3b55" - .to_string()), - Attribute::new("size", "4"), - Attribute::new("compressed_size", "4"), - Attribute::new("pinned", "false"), - ] - ), - ], - ), - ]; - - for (hash_algorithm, objs) in test_cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: BucketConfig { - hash_algorithm, - ..Default::default() - }, - limits: Default::default(), - pagination: Default::default(), - }, - ) - .unwrap(); - - for (content, pin, expected_hash, expected_size, expected_attr) in &objs { - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(content).unwrap(), - pin: *pin, - }; - let res = execute(deps.as_mut(), mock_env(), 
info.clone(), msg).unwrap(); - assert_eq!(res.attributes, *expected_attr); - - assert_eq!( - Binary::from_base64(content).unwrap(), - Binary::from( - DATA.load(&deps.storage, decode_hex(&expected_hash).into()) - .unwrap() - ), - ); - - let created = OBJECT - .load(&deps.storage, decode_hex(&expected_hash).into()) - .unwrap(); - assert_eq!(created.id, decode_hex(&expected_hash).into()); - assert_eq!(created.size.u128(), *expected_size); - assert_eq!( - created.pin_count, - if *pin { - Uint128::one() - } else { - Uint128::zero() - } - ); - - assert_eq!( - pins().has( - &deps.storage, - (decode_hex(&expected_hash).into(), info.clone().sender), - ), - *pin, - ); - } - - let bucket = BUCKET.load(&deps.storage).unwrap(); - assert_eq!( - bucket.stat.size.u128(), - objs.iter().map(|x| x.3).sum::() - ); - assert_eq!( - bucket.stat.object_count.u128(), - u128::try_from(objs.len()).unwrap() - ); - assert_eq!( - OBJECT - .keys_raw(&deps.storage, None, None, Order::Ascending) - .count(), - 2 - ); - assert_eq!( - pins() - .keys_raw(&deps.storage, None, None, Order::Ascending) - .count(), - 1 - ); - } - } - - #[test] - fn store_object_already_stored() { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let object = general_purpose::STANDARD.encode("already existing object"); - execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(object.as_str()).unwrap(), - pin: false, - }, - ) - .unwrap(); - - let res = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(object.as_str()).unwrap(), - pin: true, - }, - ); - - assert!(res.is_ok()); - assert!(pins().has( - &deps.storage, - ( - 
decode_hex("46c4b2f687df251a98cc83cc35437e9893c16861899c2f9d183e1de57d3a2c0e") - .into(), - info.sender - ), - )); - assert_eq!( - OBJECT - .load( - &deps.storage, - decode_hex("46c4b2f687df251a98cc83cc35437e9893c16861899c2f9d183e1de57d3a2c0e") - .into() - ) - .unwrap() - .pin_count, - Uint128::one() - ); - } - - #[test] - fn store_object_limits() { - let cases = vec![ - ( - BucketLimitsBuilder::default() - .max_objects(2u128) - .build() - .unwrap(), - None, - ), - ( - BucketLimitsBuilder::default() - .max_object_size(5u128) - .build() - .unwrap(), - None, - ), - ( - BucketLimitsBuilder::default() - .max_total_size(9u128) - .build() - .unwrap(), - None, - ), - ( - BucketLimitsBuilder::default() - .max_object_pins(1u128) - .build() - .unwrap(), - None, - ), - ( - BucketLimitsBuilder::default() - .max_objects(1u128) - .build() - .unwrap(), - Some(ContractError::Bucket(BucketError::MaxObjectsLimitExceeded( - 2u128.into(), - 1u128.into(), - ))), - ), - ( - BucketLimitsBuilder::default() - .max_object_size(4u128) - .build() - .unwrap(), - Some(ContractError::Bucket( - BucketError::MaxObjectSizeLimitExceeded(5u128.into(), 4u128.into()), - )), - ), - ( - BucketLimitsBuilder::default() - .max_total_size(8u128) - .build() - .unwrap(), - Some(ContractError::Bucket( - BucketError::MaxTotalSizeLimitExceeded(9u128.into(), 8u128.into()), - )), - ), - ( - BucketLimitsBuilder::default() - .max_object_pins(0u128) - .build() - .unwrap(), - Some(ContractError::Bucket( - BucketError::MaxObjectPinsLimitExceeded(1u128.into(), 0u128.into()), - )), - ), - ]; - - let obj1 = general_purpose::STANDARD.encode("okp4"); - let obj2 = general_purpose::STANDARD.encode("hello"); - - for case in cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: Default::default(), - limits: case.0, - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), 
info.clone(), msg).unwrap(); - - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(obj1.as_str()).unwrap(), - pin: false, - }; - execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(obj2.as_str()).unwrap(), - pin: true, - }; - let res = execute(deps.as_mut(), mock_env(), info.clone(), msg); - - assert_eq!(res.err(), case.1); - } - } - - #[test] - fn store_object_with_bucket_compression() { - let test_cases = vec![ - (CompressionAlgorithm::Passthrough, "hello"), - (CompressionAlgorithm::Snappy, "hello"), - (CompressionAlgorithm::Lzma, "hello"), - ]; - - for (compression_algorithm, test_data) in test_cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - let data = Binary::from_base64(&general_purpose::STANDARD.encode(test_data)).unwrap(); - - // Initialize bucket with specific compression algorithm - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: BucketConfig { - hash_algorithm: HashAlgorithm::Sha256, - compression_algorithm, - }, - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - // Store object - should use bucket's compression algorithm - let res = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: data.clone(), - pin: false, - }, - ); - assert!(res.is_ok()); - - let res = res.unwrap(); - // Just verify that the object was stored successfully - assert_eq!(res.attributes[0], Attribute::new("action", "store_object")); - assert!(res - .attributes - .iter() - .any(|attr| attr.key == "compressed_size")); - } - } - - #[test] - fn store_object_check_attributes() { - let obj_content = &general_purpose::STANDARD.encode("hello"); - let obj_exist_content = &general_purpose::STANDARD.encode("axone"); - let obj_exist_pinned_content = 
&general_purpose::STANDARD.encode("protocol"); - let obj_large_content = &general_purpose::STANDARD.encode("In a world of interconnected systems, there were countless realms, one of which was known as AXONE. Within this realm, AI researchers harnessed the power to create collaborative AI models. As the realm expanded, the researchers used their expertise to power collaborative systems, bringing innovation and advancement to all who sought their knowledge. And so, the legend of the AI researchers and their collaborative AI models lived on, inspiring future generations to unlock the potential of the digital realm with AI."); - - let test_cases = vec![ - ( - obj_content, - true, - Passthrough, - vec![ - Attribute::new("action", "store_object"), - Attribute::new( - "id", - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" - .to_string(), - ), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "true"), - ], - ), - ( - obj_content, - false, - Passthrough, - vec![ - Attribute::new("action", "store_object"), - Attribute::new( - "id", - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" - .to_string(), - ), - Attribute::new("size", "5"), - Attribute::new("compressed_size", "5"), - Attribute::new("pinned", "false"), - ], - ), - ( - obj_large_content, - true, - Snappy, - vec![ - Attribute::new("action", "store_object"), - Attribute::new( - "id", - "afb9c7804a3515714a3ec2313c990df31d54000b890ae677dcaaa1060b437660" - .to_string(), - ), - Attribute::new("size", "519"), - Attribute::new("compressed_size", "519"), - Attribute::new("pinned", "true"), - ], - ), - ( - obj_exist_content, - true, - Passthrough, - vec![ - Attribute::new("action", "store_object"), - Attribute::new( - "id", - "45a8243ff863a08531c666569ce9997b63df94c2e2aeedaed3d32656ee1ae622" - .to_string(), - ), - Attribute::new("pinned", "true"), - ], - ), - ( - obj_exist_content, - false, - Passthrough, - vec![ - Attribute::new("action", 
"store_object"), - Attribute::new( - "id", - "45a8243ff863a08531c666569ce9997b63df94c2e2aeedaed3d32656ee1ae622" - .to_string(), - ), - Attribute::new("pinned", "false"), - ], - ), - ( - obj_exist_pinned_content, - false, - Passthrough, - vec![ - Attribute::new("action", "store_object"), - Attribute::new( - "id", - "2ea88c7a30351b12a4dcfc06cdce2af6eab18416176466c2500cb6ef74f745bf" - .to_string(), - ), - Attribute::new("pinned", "false"), - ], - ), - ( - obj_exist_pinned_content, - true, - Passthrough, - vec![ - Attribute::new("action", "store_object"), - Attribute::new( - "id", - "2ea88c7a30351b12a4dcfc06cdce2af6eab18416176466c2500cb6ef74f745bf" - .to_string(), - ), - Attribute::new("pinned", "false"), - ], - ), - ]; - - for (content, pin, _compression_algorithm, expected_attr) in &test_cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - _ = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(obj_exist_content).unwrap(), - pin: false, - }, - ); - - _ = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(obj_exist_pinned_content).unwrap(), - pin: true, - }, - ); - - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(content).unwrap(), - pin: *pin, - }; - - let res = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - assert_eq!(res.attributes, *expected_attr); - } - } - - #[test] - fn object() { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: Default::default(), - limits: Default::default(), - pagination: 
Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - match query( - deps.as_ref(), - mock_env(), - QueryMsg::Object { - id: "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824".to_string(), - }, - ) - .err() - .unwrap() - { - NotFound { .. } => (), - _ => panic!("assertion failed"), - } - - let data = general_purpose::STANDARD.encode("hello"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: true, - }; - execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let msg = QueryMsg::Object { - id: "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824".to_string(), - }; - let result = query(deps.as_ref(), mock_env(), msg).unwrap(); - let response: ObjectResponse = from_json(&result).unwrap(); - assert_eq!( - response.id, - ObjectId::from("2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824") - ); - assert!(response.is_pinned); - assert_eq!(response.size.u128(), 5u128); - - let data = general_purpose::STANDARD.encode("okp4"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let msg = QueryMsg::Object { - id: "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6".to_string(), - }; - let result = query(deps.as_ref(), mock_env(), msg).unwrap(); - let response: ObjectResponse = from_json(&result).unwrap(); - assert_eq!( - response.id, - ObjectId::from("315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6") - ); - assert!(!response.is_pinned); - assert_eq!(response.size.u128(), 4u128); - } - - #[test] - fn object_data() { - let test_cases = vec![ - CompressionAlgorithm::Passthrough, - CompressionAlgorithm::Snappy, - CompressionAlgorithm::Lzma, - ]; - - for compression_algorithm in test_cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - 
let data = - Binary::from_base64(general_purpose::STANDARD.encode("okp4").as_str()).unwrap(); - - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: BucketConfig { - hash_algorithm: HashAlgorithm::Sha256, - compression_algorithm, - }, - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - match query( - deps.as_ref(), - mock_env(), - QueryMsg::ObjectData { - id: "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6" - .to_string(), - }, - ) - .err() - .unwrap() - { - NotFound { .. } => (), - _ => panic!("assertion failed"), - } - - let msg = ExecuteMsg::StoreObject { - data: data.clone(), - pin: false, - }; - execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let msg = QueryMsg::ObjectData { - id: "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6".to_string(), - }; - let result = query(deps.as_ref(), mock_env(), msg).unwrap(); - assert_eq!(result, to_json_binary(&data).unwrap()); - } - } - - #[test] - fn object_data_error() { - let mut deps = mock_dependencies(); - let id: Hash = vec![1, 2, 3].into(); - let data = &vec![255, 255, 0]; - - let object = &Object { - id: id.clone(), - size: 42u8.into(), - pin_count: Uint128::one(), - compression: compress::CompressionAlgorithm::Lzma, - compressed_size: Uint128::from(data.len() as u128), - }; - - OBJECT - .save(deps.as_mut().storage, object.id.clone(), object) - .expect("no error when storing object"); - let data_path = DATA.key(id.clone()); - data_path - .save(deps.as_mut().storage, &data) - .expect("no error when storing data"); - - let msg = QueryMsg::ObjectData { id: id.to_string() }; - - let result = query(deps.as_ref(), mock_env(), msg); - assert_eq!( - result, - Err(StdError::serialize_err( - "Lzma", - "lzma error: LZMA header invalid properties: 255 must be < 225" - )) - ); - } - - #[test] - fn pin_object() { - struct TC { - objects: Vec, - senders: 
Vec, - expected_count: usize, - expected_error: Option, - expected_object_pin_count: Vec<(ObjectId, Uint128)>, - } - - let cases = vec![ - TC { - // One object, 1 one pinner => 1 pin - objects: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - senders: vec![message_info(&addr("bob"), &[])], - expected_count: 1, - expected_error: None, - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - )], - }, - TC { - // Same object, two pinners => 2 pin - objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ], - senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - ], - expected_count: 2, - expected_error: None, - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::new(2), - )], - }, - TC { - // Same object, one pinner twice => 1 pin - objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ], - senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("bob"), &[]), - ], - expected_count: 1, - expected_error: None, - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - )], - }, - TC { - // two objects, same pinner => 2 pin - objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("bob"), &[]), - ], - 
expected_count: 2, - expected_error: None, - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::one(), - ), - ], - }, - TC { - // two objects, two pinner => 2 pin - objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - ], - expected_count: 2, - expected_error: None, - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::one(), - ), - ], - }, - TC { - // two objects, two pinner, twice 1 pinner => 2 pin - objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - message_info(&addr("alice"), &[]), - ], - expected_count: 2, - expected_error: None, - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::one(), - ), - ], - }, - TC { - // exceed limits - objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - 
"2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - message_info(&addr("martin"), &[]), - message_info(&addr("pierre"), &[]), - ], - expected_count: 3, - expected_error: Some(ContractError::Bucket( - BucketError::MaxObjectPinsLimitExceeded(Uint128::new(3), Uint128::new(2)), - )), - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::new(2), - ), - ], - }, - TC { - // Object not exists - objects: vec![ObjectId::from( - "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - )], - senders: vec![message_info(&addr("bob"), &[])], - expected_count: 0, - expected_error: Some(ContractError::Std(StdError::not_found( - not_found_object_info::( - "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - ), - ))), - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::zero(), - )], - }, - TC { - // Invalid object id - objects: vec![ObjectId::from("invalid id")], - senders: vec![message_info(&addr("bob"), &[])], - expected_count: 0, - expected_error: Some(ContractError::Std(StdError::parse_err( - type_name::>(), - "invalid Base16 encoding".to_string(), - ))), - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::zero(), - )], - }, - ]; - - for case in cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - instantiate( - deps.as_mut(), - 
mock_env(), - info.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: BucketLimitsBuilder::default() - .max_object_pins(Uint128::new(2)) - .build() - .unwrap(), - pagination: Default::default(), - }, - ) - .unwrap(); - - let data = general_purpose::STANDARD.encode("okp4"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("data"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("hello"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let mut last_result: Option> = None; - case.objects - .iter() - .zip(case.senders) - .for_each(|(object_id, info)| { - last_result = Some(execute( - deps.as_mut(), - mock_env(), - info, - ExecuteMsg::PinObject { - id: object_id.clone(), - }, - )); - }); - - match case.expected_error { - Some(err) => assert_eq!(last_result.unwrap().unwrap_err(), err), - _ => { - assert_eq!( - pins() - .keys_raw(&deps.storage, None, None, Order::Ascending) - .count(), - case.expected_count - ); - for (object_id, count) in case.expected_object_pin_count { - assert_eq!( - OBJECT - .load(&deps.storage, decode_hex(&object_id).into()) - .unwrap() - .pin_count, - count - ); - } - } - } - } - } - - #[test] - fn unpin_object() { - struct TC { - pin: Vec, - pin_senders: Vec, - unpin: Vec, - unpin_senders: Vec, - expected_count: usize, - expected_error: Option, - expected_object_pin_count: Vec<(ObjectId, Uint128)>, - } - - let cases = vec![ - TC { - pin: vec![ObjectId::from( - 
"315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - pin_senders: vec![message_info(&addr("bob"), &[])], - unpin: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - unpin_senders: vec![message_info(&addr("bob"), &[])], - expected_count: 0, - expected_error: None, - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::zero(), - )], - }, - TC { - pin: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - pin_senders: vec![message_info(&addr("bob"), &[])], - unpin: vec![ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - )], - unpin_senders: vec![message_info(&addr("bob"), &[])], - expected_count: 1, - expected_error: None, - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::zero(), - ), - ], - }, - TC { - pin: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - pin_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("bob"), &[]), - ], - unpin: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - unpin_senders: vec![message_info(&addr("bob"), &[])], - expected_count: 1, - expected_error: None, - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::zero(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::one(), - ), - ], - }, - TC { - pin: vec![], - pin_senders: vec![], - 
unpin: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - unpin_senders: vec![message_info(&addr("bob"), &[])], - expected_count: 0, - expected_error: None, - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::zero(), - )], - }, - TC { - pin: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - pin_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - message_info(&addr("martin"), &[]), - message_info(&addr("pierre"), &[]), - ], - unpin: vec![ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - )], - unpin_senders: vec![message_info(&addr("martin"), &[])], - expected_count: 3, - expected_error: None, - expected_object_pin_count: vec![ - ( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - ), - ( - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - Uint128::new(2), - ), - ], - }, - TC { - // Object not exists - pin: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - pin_senders: vec![message_info(&addr("bob"), &[])], - unpin: vec![ObjectId::from( - "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - )], - unpin_senders: vec![message_info(&addr("martin"), &[])], - expected_count: 1, - expected_error: Some(ContractError::Std(StdError::not_found( - not_found_object_info::( - "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - ), - ))), - 
expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - )], - }, - TC { - // Invalid object id - pin: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - pin_senders: vec![message_info(&addr("bob"), &[])], - unpin: vec![ObjectId::from("invalid id")], - unpin_senders: vec![message_info(&addr("martin"), &[])], - expected_count: 1, - expected_error: Some(ContractError::Std(StdError::parse_err( - type_name::>(), - "invalid Base16 encoding".to_string(), - ))), - expected_object_pin_count: vec![( - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - Uint128::one(), - )], - }, - ]; - - for case in cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }, - ) - .unwrap(); - - let data = general_purpose::STANDARD.encode("okp4"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("data"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("hello"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let mut last_result: Option> = None; - case.pin - .iter() - .zip(case.pin_senders) - .for_each(|(object_id, info)| { - last_result = Some(execute( - 
deps.as_mut(), - mock_env(), - info, - ExecuteMsg::PinObject { - id: object_id.clone(), - }, - )); - }); - case.unpin - .iter() - .zip(case.unpin_senders) - .for_each(|(object_id, info)| { - last_result = Some(execute( - deps.as_mut(), - mock_env(), - info, - ExecuteMsg::UnpinObject { - id: object_id.clone(), - }, - )); - }); - - match case.expected_error { - Some(err) => assert_eq!(last_result.unwrap().unwrap_err(), err), - _ => { - assert_eq!( - pins() - .keys_raw(&deps.storage, None, None, Order::Ascending) - .count(), - case.expected_count - ); - for (object_id, count) in case.expected_object_pin_count { - assert_eq!( - OBJECT - .load(&deps.storage, decode_hex(&object_id).into()) - .unwrap() - .pin_count, - count - ); - } - } - } - } - } - - #[test] - fn objects() { - let mut deps = mock_dependencies(); - let creator1 = addr("creator1"); - let creator2 = addr("creator2"); - - let info1 = message_info(&creator1, &[]); - let info2 = message_info(&creator2, &[]); - - // 1. Instantiate - instantiate( - deps.as_mut(), - mock_env(), - info1.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }, - ) - .unwrap(); - - // 2. No objects yet - let response: ObjectsResponse = from_json( - &query( - deps.as_ref(), - mock_env(), - QueryMsg::Objects { - first: None, - after: None, - }, - ) - .unwrap(), - ) - .unwrap(); - assert_eq!(response.data.len(), 0); - assert!(!response.page_info.has_next_page); - assert_eq!(response.page_info.cursor, "".to_string()); - - // 3. 
Store 3 objects - // creator1 stores 2 (one pinned, one not), creator2 stores 1 (pinned) - for (data, info, pin) in vec![ - ("object1", &info1, false), - ("object2", &info1, true), - ("object3", &info2, true), - ] { - execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(general_purpose::STANDARD.encode(data).as_str()) - .unwrap(), - pin, - }, - ) - .unwrap(); - } - - // 4. Fetch all objects (no pinned_by) => should return 3 - let response: ObjectsResponse = from_json( - &query( - deps.as_ref(), - mock_env(), - QueryMsg::Objects { - first: None, - after: None, - }, - ) - .unwrap(), - ) - .unwrap(); - assert_eq!(response.data.len(), 3); - assert!(!response.page_info.has_next_page); - assert_eq!( - response.page_info.cursor, - "F6Q2ghnctaKMHREiWKgQtSUBvcy2J1jajNNY3zzUoMFpH52jo".to_string() - ); - - // 5. Fetch with pagination (first: 1) - let first_page: ObjectsResponse = from_json( - &query( - deps.as_ref(), - mock_env(), - QueryMsg::Objects { - first: Some(1), - after: None, - }, - ) - .unwrap(), - ) - .unwrap(); - assert_eq!(first_page.data.len(), 1); - assert!(first_page.page_info.has_next_page); - assert_eq!( - first_page.page_info.cursor, - "F6Q2gWw8q14Q37Hkdnu45eWE9YBvr4FmokRvRt7m1sVERuHoy".to_string() - ); - - // 6. 
Fetch second page (after cursor from previous) - let second_page: ObjectsResponse = from_json( - &query( - deps.as_ref(), - mock_env(), - QueryMsg::Objects { - first: Some(2), - after: Some(first_page.page_info.cursor), - }, - ) - .unwrap(), - ) - .unwrap(); - assert_eq!(second_page.data.len(), 2); - assert!(!second_page.page_info.has_next_page); - assert_eq!( - second_page.page_info.cursor, - "F6Q2ghnctaKMHREiWKgQtSUBvcy2J1jajNNY3zzUoMFpH52jo".to_string() - ); - } - - #[test] - fn query_objects_pinned_by() { - let mut deps = mock_dependencies(); - let creator1 = addr("creator1"); - let creator2 = addr("creator2"); - - let info1 = message_info(&creator1, &[]); - let info2 = message_info(&creator2, &[]); - - instantiate( - deps.as_mut(), - mock_env(), - info1.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }, - ) - .unwrap(); - - let objects = vec![ - ("object1", &info1, false), - ("object2", &info1, true), - ("object3", &info2, true), - ("object4", &info2, true), - ("object5", &info1, true), - ]; - - for (data, info, pin) in objects { - execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(general_purpose::STANDARD.encode(data).as_str()) - .unwrap(), - pin, - }, - ) - .unwrap(); - } - - let cases = vec![ - (creator1.clone(), None, None, 2), - (creator2.clone(), None, None, 2), - (creator1.clone(), Some(1), None, 1), - (creator1, None, Some("D4menWGWo3hzhXpexzE6TTu8w9qU2Mcundpv13CsWP1osLVet7mpmtizLDNQbEeqvJcFQ5Gtn1wixWVRQySUxsxW7mH6yt7MrsC4MX4yRykaqza53PxFY5fZkwVmTC8PkrEoPWDGTS1mboh81T".to_string()), 1), - (creator2, Some(1), None, 1), - ]; - - for (address, first, after, expected_count) in cases { - let result: ObjectsResponse = from_json( - &query( - deps.as_ref(), - mock_env(), - QueryMsg::ObjectsPinnedBy { - address: address.to_string(), - first, - after, - }, - ) - .unwrap(), - ) - 
.unwrap(); - - assert_eq!(result.data.len(), expected_count); - } - } - - #[test] - fn pins_for_object() { - let mut deps = mock_dependencies(); - let info1 = message_info(&addr("creator1"), &[]); - let info2 = message_info(&addr("creator2"), &[]); - - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info1.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("object1"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - execute(deps.as_mut(), mock_env(), info1.clone(), msg).unwrap(); - // 1: 445008b7f2932922bdb184771d9978516a4f89d77000c2d6eab18b0894aac3a7 - let data = general_purpose::STANDARD.encode("object2"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: true, - }; - execute(deps.as_mut(), mock_env(), info2, msg).unwrap(); - // 2: abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56 - let msg = ExecuteMsg::PinObject { - id: "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56".to_string(), - }; - execute(deps.as_mut(), mock_env(), info1, msg).unwrap(); - - let cases = vec![ - ( - QueryMsg::PinsForObject { - object_id: "445008b7f2932922bdb184771d9978516a4f89d77000c2d6eab18b0894aac3a7" - .to_string(), - first: None, - after: None, - }, - Vec::::new(), - PageInfo { - has_next_page: false, - cursor: "".to_string(), - }, - ), - ( - QueryMsg::PinsForObject { - object_id: "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56" - .to_string(), - first: None, - after: None, - }, - vec![addr("creator2"), addr("creator1")], - PageInfo { - has_next_page: false, - cursor: "D4meneVbphiLfeydC5DumtwHVA59Z8wMnpifEZzV9w8ANo2YNxFahwC6L3Y9caxvKwTsZCngnp21pkczRLSQoEfEiY5udrruknAGDThEUVVuVJSsB5VXg14K8NK9nsq3VWuBPDWVzi3VhWEbg1".to_string(), - }, - ), - ( - 
QueryMsg::PinsForObject { - object_id: "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56" - .to_string(), - first: Some(1), - after: None, - }, - vec![addr("creator2")], - PageInfo { - has_next_page: true, - cursor: "D4meneVbphiLfeydC5DumtwHVA59Z8wMnpifEZzV9w8ANo2YNxFahwC6L3Y9caxvKwTorkur64NDErbV3tzjNnooXsjfvXVsWZrzk11KqD3HQw2sgRMTxDQtAzR8cNNT8fGo7aZTtgqEfSJrBN".to_string(), - }, - ), - ( - QueryMsg::PinsForObject { - object_id: "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56" - .to_string(), - first: Some(1), - after: Some("D4meneVbphiLfeydC5DumtwHVA59Z8wMnpifEZzV9w8ANo2YNxFahwC6L3Y9caxvKwTorkur64NDErbV3tzjNnooXsjfvXVsWZrzk11KqD3HQw2sgRMTxDQtAzR8cNNT8fGo7aZTtgqEfSJrBN".to_string()), - }, - vec![addr("creator1")], - PageInfo { - has_next_page: false, - cursor: "D4meneVbphiLfeydC5DumtwHVA59Z8wMnpifEZzV9w8ANo2YNxFahwC6L3Y9caxvKwTsZCngnp21pkczRLSQoEfEiY5udrruknAGDThEUVVuVJSsB5VXg14K8NK9nsq3VWuBPDWVzi3VhWEbg1".to_string(), - }, - ), - ]; - - for (n, case) in cases.into_iter().enumerate() { - let result = query(deps.as_ref(), mock_env_addr(), case.0).unwrap(); - let response: PinsForObjectResponse = from_json(&result).unwrap(); - assert_eq!( - response - .data - .iter() - .map(|a| Addr::unchecked(a)) - .collect::>(), - case.1, - "case: {}", - n - ); - assert_eq!(response.page_info, case.2, "case: {}", n); - } - } - - #[test] - fn object_pins_errors() { - let mut deps = mock_dependencies(); - - let msg = InstantiateMsg { - owner: None, - bucket: String::from("test"), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate( - deps.as_mut(), - mock_env(), - message_info(&addr("creator1"), &[]), - msg, - ) - .unwrap(); - - let cases = vec![ - ( - QueryMsg::PinsForObject { - object_id: "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56" - .to_string(), - after: None, - first: None, - }, - StdError::not_found(not_found_object_info::( - 
"abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - )), - ), - ( - QueryMsg::PinsForObject { - object_id: "invalid id".to_string(), - after: None, - first: None, - }, - StdError::parse_err( - type_name::>(), - "invalid Base16 encoding".to_string(), - ), - ), - ]; - - for (n, case) in cases.into_iter().enumerate() { - let res = query(deps.as_ref(), mock_env(), case.0).err().unwrap(); - assert_eq!(res, case.1, "case: {}", n) - } - } - - #[test] - fn forget_object() { - struct TC { - pins: Vec, - pins_senders: Vec, - forget_objects: Vec, - forget_senders: Vec, - expected_count: usize, - expected_total_size: Uint128, - expected_compressed_size: Uint128, - expected_error: Option, - } - - let cases = vec![ - TC { - pins: vec![], - pins_senders: vec![], - forget_objects: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - forget_senders: vec![message_info(&addr("bob"), &[])], - expected_count: 3, - expected_total_size: Uint128::new(474), - expected_compressed_size: Uint128::new(474), - expected_error: None, - }, - TC { - pins: vec![], - pins_senders: vec![], - forget_objects: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", - ), - ], - forget_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("bob"), &[]), - ], - expected_count: 2, - expected_total_size: Uint128::new(469), - expected_compressed_size: Uint128::new(469), - expected_error: None, - }, - TC { - pins: vec![], - pins_senders: vec![], - forget_objects: vec![ObjectId::from( - "d1abcabb14dd23d2cf60472dffb4823be10ac20148e8ef7b9644cc14fcf8a073", - )], - forget_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("bob"), &[]), - ], - expected_count: 3, - expected_total_size: Uint128::new(13), - expected_compressed_size: Uint128::new(13), - expected_error: None, - }, - TC { - pins: 
vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - pins_senders: vec![message_info(&addr("bob"), &[])], - forget_objects: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - forget_senders: vec![message_info(&addr("alice"), &[])], // the sender is different from the pinner, so error - expected_count: 4, - expected_total_size: Uint128::new(478), - expected_compressed_size: Uint128::new(478), - expected_error: Some(ContractError::ObjectPinned {}), - }, - TC { - pins: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - pins_senders: vec![message_info(&addr("bob"), &[])], - forget_objects: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - forget_senders: vec![message_info(&addr("bob"), &[])], // the sender is the same as the pinner, so forget should work - expected_count: 3, - expected_total_size: Uint128::new(474), - expected_compressed_size: Uint128::new(474), - expected_error: None, - }, - TC { - pins: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ], - pins_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - ], - forget_objects: vec![ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - )], - forget_senders: vec![message_info(&addr("bob"), &[])], // the sender is the same as the pinner, but another pinner is on it so error - expected_count: 4, - expected_total_size: Uint128::new(478), - expected_compressed_size: Uint128::new(478), - expected_error: Some(ContractError::ObjectPinned {}), - }, - TC { - pins: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - 
"315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ], - pins_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - ], - forget_objects: vec![ObjectId::from( - "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - )], - forget_senders: vec![message_info(&addr("bob"), &[])], // the sender is the same as the pinner, but another pinner is on it so error - expected_count: 4, - expected_total_size: Uint128::new(478), - expected_compressed_size: Uint128::new(478), - expected_error: Some(ContractError::Std(StdError::not_found( - not_found_object_info::( - "abafa4428bdc8c34dae28bbc17303a62175f274edf59757b3e9898215a428a56", - ), - ))), - }, - TC { - pins: vec![ - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ObjectId::from( - "315d0d9ab12c5f8884100055f79de50b72db4bd2c9bfd3df049d89640fed1fa6", - ), - ], - pins_senders: vec![ - message_info(&addr("bob"), &[]), - message_info(&addr("alice"), &[]), - ], - forget_objects: vec![ObjectId::from("invalid id")], - forget_senders: vec![message_info(&addr("bob"), &[])], // the sender is the same as the pinner, but another pinner is on it so error - expected_count: 4, - expected_total_size: Uint128::new(478), - expected_compressed_size: Uint128::new(478), - expected_error: Some(ContractError::Std(StdError::parse_err( - type_name::>(), - "invalid Base16 encoding".to_string(), - ))), - }, - ]; - - for case in cases { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }, - ) - .unwrap(); - - let data = general_purpose::STANDARD.encode("okp4"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = 
execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("data"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode("hello"); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let data = general_purpose::STANDARD.encode( - "In a magical land, there \ - were many realms, one of which was known as OKP4. Within this realm, druid programmers \ - possessed the power to create smart contracts. As the kingdom grew, the druids used \ - their skills to power decentralized systems, bringing prosperity and wonder to all who \ - sought their expertise. And so, the legend of the druid programmers and their magical \ - smart contracts lived on, inspiring future generations to unlock the power of the \ - digital realm.", - ); - let msg = ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }; - let _ = execute(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - case.pins - .iter() - .zip(case.pins_senders) - .for_each(|(object_id, info)| { - _ = execute( - deps.as_mut(), - mock_env(), - info, - ExecuteMsg::PinObject { - id: object_id.clone(), - }, - ); - }); - - let mut last_result: Option> = None; - - case.forget_objects - .iter() - .zip(case.forget_senders) - .for_each(|(object_id, info)| { - last_result = Some(execute( - deps.as_mut(), - mock_env(), - info, - ExecuteMsg::ForgetObject { - id: object_id.clone(), - }, - )); - }); - - match case.expected_error { - Some(err) => assert_eq!(last_result.unwrap().unwrap_err(), err), - _ => { - for object_id in case.forget_objects { - assert_eq!( - OBJECT - .load(&deps.storage, decode_hex(object_id.as_str()).into()) - 
.unwrap_err(), - StdError::not_found(not_found_object_info::(&object_id)) - ); - } - } - } - assert_eq!( - OBJECT - .keys_raw(&deps.storage, None, None, Order::Ascending) - .count(), - case.expected_count - ); - - let bucket = query::bucket(deps.as_ref()).unwrap(); - assert_eq!( - bucket.stat, - BucketStatBuilder::default() - .object_count(Uint128::from(case.expected_count as u128)) - .size(case.expected_total_size) - .compressed_size(case.expected_compressed_size) - .build() - .unwrap() - ); - assert_eq!( - bucket.stat.object_count, - Uint128::from(case.expected_count as u128) - ); - assert_eq!(bucket.stat.size, case.expected_total_size); - assert_eq!(bucket.stat.compressed_size, case.expected_compressed_size); - } - } - - #[test] - fn store_forgotten_object() { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - instantiate( - deps.as_mut(), - mock_env(), - info.clone(), - InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }, - ) - .unwrap(); - - let data = general_purpose::STANDARD.encode("data"); - let _ = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }, - ) - .unwrap(); - - let _ = execute( - deps.as_mut(), - mock_env(), - info.clone(), - ExecuteMsg::ForgetObject { - id: "3a6eb0790f39ac87c94f3856b2dd2c5d110e6811602261a9a923d3bb23adc8b7".to_string(), - }, - ) - .unwrap(); - - let result = execute( - deps.as_mut(), - mock_env(), - info, - ExecuteMsg::StoreObject { - data: Binary::from_base64(data.as_str()).unwrap(), - pin: false, - }, - ); - - assert_eq!( - result.err(), - None, - "Object should successfully restored after a forgot" - ); - } - - #[test] - fn proper_ownership_initialization_with_none() { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - - let msg = 
InstantiateMsg { - owner: None, - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - - instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Ownership {}).unwrap(); - let ownership: cw_ownable::Ownership = from_json(&res).unwrap(); - - assert!(ownership.owner.is_none()); - assert!(ownership.pending_owner.is_none()); - assert!(ownership.pending_expiry.is_none()); - } - - #[test] - fn proper_ownership_initialization_with_specific_owner() { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - let designated_owner = addr("designated_owner"); - - let msg = InstantiateMsg { - owner: Some(designated_owner.to_string()), - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - - instantiate(deps.as_mut(), mock_env(), info, msg).unwrap(); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Ownership {}).unwrap(); - let ownership: cw_ownable::Ownership = from_json(&res).unwrap(); - - assert_eq!(ownership.owner, Some(designated_owner)); - assert!(ownership.pending_owner.is_none()); - assert!(ownership.pending_expiry.is_none()); - } - - #[test] - fn update_ownership_transfer_success() { - let mut deps = mock_dependencies(); - let info = message_info(&addr(CREATOR), &[]); - let new_owner = addr("new_owner"); - - let msg = InstantiateMsg { - owner: Some(addr(CREATOR).to_string()), - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), info.clone(), msg).unwrap(); - - let transfer_msg = ExecuteMsg::UpdateOwnership(cw_ownable::Action::TransferOwnership { - new_owner: new_owner.to_string(), - expiry: None, - }); - - let res = execute(deps.as_mut(), mock_env(), info, transfer_msg).unwrap(); - 
assert_eq!(res.messages.len(), 0); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Ownership {}).unwrap(); - let ownership: cw_ownable::Ownership = from_json(&res).unwrap(); - - assert_eq!(ownership.owner, Some(addr(CREATOR))); - assert_eq!(ownership.pending_owner, Some(new_owner)); - assert!(ownership.pending_expiry.is_none()); - } - - #[test] - fn update_ownership_accept_success() { - let mut deps = mock_dependencies(); - let creator_info = message_info(&addr(CREATOR), &[]); - let new_owner = addr("new_owner"); - let new_owner_info = message_info(&new_owner, &[]); - - let msg = InstantiateMsg { - owner: Some(addr(CREATOR).to_string()), - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), creator_info.clone(), msg).unwrap(); - - let transfer_msg = ExecuteMsg::UpdateOwnership(cw_ownable::Action::TransferOwnership { - new_owner: new_owner.to_string(), - expiry: None, - }); - execute(deps.as_mut(), mock_env(), creator_info, transfer_msg).unwrap(); - - let accept_msg = ExecuteMsg::UpdateOwnership(cw_ownable::Action::AcceptOwnership); - let res = execute(deps.as_mut(), mock_env(), new_owner_info, accept_msg).unwrap(); - assert_eq!(res.messages.len(), 0); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Ownership {}).unwrap(); - let ownership: cw_ownable::Ownership = from_json(&res).unwrap(); - - assert_eq!(ownership.owner, Some(new_owner)); - assert!(ownership.pending_owner.is_none()); - assert!(ownership.pending_expiry.is_none()); - } - - #[test] - fn update_ownership_unauthorized_fails() { - let mut deps = mock_dependencies(); - let creator_info = message_info(&addr(CREATOR), &[]); - let unauthorized_info = message_info(&addr(SENDER), &[]); - - let msg = InstantiateMsg { - owner: Some(addr(CREATOR).to_string()), - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - 
}; - instantiate(deps.as_mut(), mock_env(), creator_info, msg).unwrap(); - - let transfer_msg = ExecuteMsg::UpdateOwnership(cw_ownable::Action::TransferOwnership { - new_owner: addr("someone_else").to_string(), - expiry: None, - }); - - let err = execute(deps.as_mut(), mock_env(), unauthorized_info, transfer_msg).unwrap_err(); - - assert!(matches!(err, ContractError::Ownership(_))); - } - - #[test] - fn update_ownership_renounce_success() { - let mut deps = mock_dependencies(); - let creator_info = message_info(&addr(CREATOR), &[]); - - let msg = InstantiateMsg { - owner: Some(addr(CREATOR).to_string()), - bucket: "test".to_string(), - config: Default::default(), - limits: Default::default(), - pagination: Default::default(), - }; - instantiate(deps.as_mut(), mock_env(), creator_info.clone(), msg).unwrap(); - - let renounce_msg = ExecuteMsg::UpdateOwnership(cw_ownable::Action::RenounceOwnership); - let res = execute(deps.as_mut(), mock_env(), creator_info, renounce_msg).unwrap(); - assert_eq!(res.messages.len(), 0); - - let res = query(deps.as_ref(), mock_env(), QueryMsg::Ownership {}).unwrap(); - let ownership: cw_ownable::Ownership = from_json(&res).unwrap(); - - assert!(ownership.owner.is_none()); - assert!(ownership.pending_owner.is_none()); - assert!(ownership.pending_expiry.is_none()); - } -} diff --git a/contracts/axone-objectarium/src/crypto.rs b/contracts/axone-objectarium/src/crypto.rs deleted file mode 100644 index cad05036..00000000 --- a/contracts/axone-objectarium/src/crypto.rs +++ /dev/null @@ -1,171 +0,0 @@ -use cosmwasm_std::{StdError, StdResult}; -use cw_storage_plus::{Key, KeyDeserialize, Prefixer, PrimaryKey}; -use md5; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use sha2; -use sha2::Digest; -use std::any::type_name; -use std::fmt; - -/// HashAlgorithm is the type of the hash algorithm. -pub enum HashAlgorithm { - /// Represents the MD5 algorithm. - MD5, - /// Represents the SHA-224 algorithm. 
- Sha224, - /// Represents the SHA-256 algorithm. - Sha256, - /// Represents the SHA-384 algorithm. - Sha384, - /// Represents the SHA-512 algorithm. - Sha512, -} - -impl HashAlgorithm { - /// hash returns the hash of the given data using the given algorithm. - pub fn hash_fn(&self) -> HashFn { - match self { - HashAlgorithm::MD5 => md5_hash, - HashAlgorithm::Sha224 => sha224_hash, - HashAlgorithm::Sha256 => sha256_hash, - HashAlgorithm::Sha384 => sha384_hash, - HashAlgorithm::Sha512 => sha512_hash, - } - } -} - -/// Hash represent a Object hash as binary value. -#[derive( - Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, JsonSchema, -)] -pub struct Hash(Vec); - -/// HashFn is the type of the function used to hash data. -pub type HashFn = fn(&Vec) -> Hash; - -/// hash returns the hash of the given data using the given algorithm. -pub fn hash<'a>(algorithm: &'a HashAlgorithm, data: &'a Vec) -> Hash { - algorithm.hash_fn()(data) -} - -/// md5_hash returns the MD5 hash of the given data. -fn md5_hash(data: &Vec) -> Hash { - md5::Md5::digest(data).to_vec().into() -} - -/// sha224_hash returns the SHA-224 hash of the given data. -fn sha224_hash(data: &Vec) -> Hash { - sha2::Sha224::digest(data).to_vec().into() -} - -/// sha256_hash returns the SHA-256 hash of the given data. -fn sha256_hash(data: &Vec) -> Hash { - sha2::Sha256::digest(data).to_vec().into() -} - -/// sha384_hash returns the SHA-384 hash of the given data. -fn sha384_hash(data: &Vec) -> Hash { - sha2::Sha384::digest(data).to_vec().into() -} - -/// sha512_hash returns the SHA-512 hash of the given data. 
-fn sha512_hash(data: &Vec) -> Hash { - sha2::Sha512::digest(data).to_vec().into() -} - -impl TryFrom for Hash { - type Error = StdError; - - fn try_from(s: String) -> StdResult { - base16ct::lower::decode_vec(s) - .map_err(|e| StdError::parse_err(type_name::>(), e.to_string())) - .map(Hash) - } -} - -// Allows for a (user-friendly) string representation of Hash as a lower Base16 (hex) encoding. -impl fmt::Display for Hash { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let hex_string = base16ct::lower::encode_string(&self.0); - write!(f, "{}", hex_string) - } -} - -impl From> for Hash { - fn from(hash: Vec) -> Self { - Hash(hash) - } -} - -impl From for Vec { - fn from(hash: Hash) -> Self { - hash.0 - } -} - -impl From<&Hash> for Vec { - fn from(hash: &Hash) -> Self { - hash.0.clone() - } -} - -impl<'a> PrimaryKey<'a> for Hash { - type Prefix = (); - type SubPrefix = (); - type Suffix = Self; - type SuperSuffix = Self; - - fn key(&self) -> Vec> { - vec![Key::Ref(self.0.as_ref())] - } -} - -impl KeyDeserialize for Hash { - type Output = Hash; - const KEY_ELEMS: u16 = 1; - - #[inline(always)] - fn from_vec(value: Vec) -> StdResult { - Ok(Hash(value)) - } -} - -impl KeyDeserialize for &Hash { - type Output = Hash; - const KEY_ELEMS: u16 = 1; - - #[inline(always)] - fn from_vec(value: Vec) -> StdResult { - Ok(Hash(value)) - } -} - -impl<'a> Prefixer<'a> for Hash { - fn prefix(&self) -> Vec> { - vec![Key::Ref(self.0.as_ref())] - } -} - -impl AsRef<[u8]> for Hash { - #[inline] - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -#[cfg(test)] -mod tests { - use crate::crypto::Hash; - - #[test] - fn vec_from_hash() { - let h = Hash(vec![1, 2, 3]); - let result: Vec = h.into(); - assert_eq!(result, vec![1, 2, 3]); - - let h = &Hash(vec![3, 2, 1]); - let result: Vec = h.into(); - assert_eq!(result, vec![3, 2, 1]) - } -} diff --git a/contracts/axone-objectarium/src/cursor.rs b/contracts/axone-objectarium/src/cursor.rs deleted file mode 100644 index 
1e347385..00000000 --- a/contracts/axone-objectarium/src/cursor.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::crypto::Hash; -use crate::msg::Cursor; -use crate::state::{Object, Pin, PinPK}; -use bin_it::{BinaryReader, BinaryWriter}; -use cosmwasm_std::{Addr, StdError, StdResult}; - -pub trait AsCursor { - fn encode_cursor(&self) -> StdResult; - fn decode_cursor(_: Cursor) -> StdResult; -} - -impl AsCursor for Object { - fn encode_cursor(&self) -> StdResult { - let mut writer = BinaryWriter::new(); - writer.write_vec_u8(self.id.as_ref()); - - Ok(bs58::encode(writer.get_data()).into_string()) - } - - fn decode_cursor(cursor: Cursor) -> StdResult { - let decoded = bs58::decode(cursor) - .into_vec() - .map_err(|err| StdError::parse_err("Cursor", err))?; - - let mut reader = BinaryReader::new(&decoded); - let hash = reader - .read_vec_u8() - .map_err(|err| StdError::parse_err("Cursor", err))?; - - Ok(hash.into()) - } -} - -impl AsCursor for Pin { - fn encode_cursor(&self) -> StdResult { - let mut writer = BinaryWriter::new(); - writer.write_vec_u8(self.id.as_ref()); - writer.write_string(self.address.as_str()); - - Ok(bs58::encode(writer.get_data()).into_string()) - } - - fn decode_cursor(cursor: Cursor) -> StdResult<(Hash, Addr)> { - let decoded = bs58::decode(cursor) - .into_vec() - .map_err(|err| StdError::parse_err("Cursor", err))?; - - let mut reader = BinaryReader::new(&decoded); - let hash = reader - .read_vec_u8() - .map_err(|err| StdError::parse_err("Cursor", err))?; - let addr = reader - .read_string() - .map_err(|err| StdError::parse_err("Cursor", err))?; - - Ok((hash.into(), Addr::unchecked(addr))) - } -} - -#[cfg(test)] -mod tests {} diff --git a/contracts/axone-objectarium/src/error.rs b/contracts/axone-objectarium/src/error.rs deleted file mode 100644 index ffaf70ca..00000000 --- a/contracts/axone-objectarium/src/error.rs +++ /dev/null @@ -1,111 +0,0 @@ -use crate::compress::CompressionError; -use crate::msg::CompressionAlgorithm; -use 
cosmwasm_std::{StdError, Uint128}; -use cw_ownable::OwnershipError; -use cw_utils::PaymentError; -use thiserror::Error; - -#[derive(Debug, Error, PartialEq)] -pub enum ContractError { - #[error("{0}")] - Std(#[from] StdError), - - #[error("{0}")] - Bucket(#[from] BucketError), - - #[error("Object is pinned and cannot be forgotten")] - ObjectPinned {}, - - #[error("Compression error: {0}")] - CompressionError(String), - - #[error("{0}")] - Payment(#[from] PaymentError), - - #[error("{0}")] - Ownership(#[from] OwnershipError), -} - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum BucketError { - #[error("Name of bucket could not be empty")] - EmptyName, - - #[error("Maximum total size exceeded: {0} / {1}")] - MaxTotalSizeLimitExceeded(Uint128, Uint128), - - #[error("Maximum objects number exceeded: {0} / {1}")] - MaxObjectsLimitExceeded(Uint128, Uint128), - - #[error("Maximum object size exceeded: {0} / {1}")] - MaxObjectSizeLimitExceeded(Uint128, Uint128), - - #[error("Maximum object pins number exceeded: {0} / {1}")] - MaxObjectPinsLimitExceeded(Uint128, Uint128), - - #[error("Compression algorithm is not accepted: {0:?} (accepted: \"{1:?}\")")] - CompressionAlgorithmNotAccepted(CompressionAlgorithm, Vec), -} - -impl From for ContractError { - fn from(err: CompressionError) -> Self { - match err { - CompressionError::Error(err) => ContractError::CompressionError(err), - } - } -} - -#[test] -fn test_bucket_error_messages() { - let cases = vec![ - (ContractError::Std(StdError::generic_err("Software failure. Press left mouse button to continue. Guru Mediation #8000000B.0000AAC00")), - "Generic error: Software failure. Press left mouse button to continue. 
Guru Mediation #8000000B.0000AAC00" - ), - ( - ContractError::Bucket(BucketError::EmptyName), - "Name of bucket could not be empty", - ), - ( - ContractError::Bucket(BucketError::MaxTotalSizeLimitExceeded( - 200u8.into(), - 100u8.into(), - )), - "Maximum total size exceeded: 200 / 100", - ), - ( - ContractError::Bucket(BucketError::MaxObjectsLimitExceeded(42u8.into(), 40u8.into())), - "Maximum objects number exceeded: 42 / 40", - ), - ( - ContractError::Bucket(BucketError::MaxObjectSizeLimitExceeded( - 603u16.into(), - 111u16.into(), - )), - "Maximum object size exceeded: 603 / 111", - ), - ( - ContractError::Bucket(BucketError::MaxObjectPinsLimitExceeded(5u8.into(), 2u8.into())), - "Maximum object pins number exceeded: 5 / 2", - ), - ( - ContractError::Bucket(BucketError::CompressionAlgorithmNotAccepted( - CompressionAlgorithm::Snappy, - vec![CompressionAlgorithm::Passthrough], - )), - "Compression algorithm is not accepted: Snappy (accepted: \"[Passthrough]\")", - ), - (ContractError::ObjectPinned {}, "Object is pinned and cannot be forgotten"), - ( - ContractError::CompressionError("Insufficient ch'i to compress file".to_string()), - "Compression error: Insufficient ch'i to compress file", - ), - ( - CompressionError::Error("Cannot compress empty data".to_string()).into(), - "Compression error: Cannot compress empty data", - ), - ]; - - for (error, expected_message) in cases { - assert_eq!(error.to_string(), expected_message); - } -} diff --git a/contracts/axone-objectarium/src/lib.rs b/contracts/axone-objectarium/src/lib.rs deleted file mode 100644 index 59c0e3c3..00000000 --- a/contracts/axone-objectarium/src/lib.rs +++ /dev/null @@ -1,10 +0,0 @@ -pub mod compress; -pub mod contract; -pub mod crypto; -mod cursor; -mod error; -pub mod msg; -mod pagination; -pub mod state; - -pub use crate::error::ContractError; diff --git a/contracts/axone-objectarium/src/msg.rs b/contracts/axone-objectarium/src/msg.rs deleted file mode 100644 index 7fb5b76b..00000000 --- 
a/contracts/axone-objectarium/src/msg.rs +++ /dev/null @@ -1,450 +0,0 @@ -use cosmwasm_schema::{cw_serde, QueryResponses}; -use cosmwasm_std::{Binary, Uint128}; -use cw_ownable::{cw_ownable_execute, cw_ownable_query}; -use derive_builder::Builder; - -/// ObjectId is the type of identifier of an object in the bucket. -pub type ObjectId = String; - -/// Cursor is the opaque type of cursor used for pagination. -pub type Cursor = String; - -/// Instantiate messages -#[cw_serde] -pub struct InstantiateMsg { - /// The contract owner. - /// If not set, the contract starts without an owner (ownerless). - pub owner: Option, - /// The name of the bucket. - /// The name could not be empty or contains whitespaces. - /// If name contains whitespace, they will be removed. - pub bucket: String, - /// The configuration of the bucket. - #[serde(default)] - pub config: BucketConfig, - /// The limits of the bucket. - #[serde(default)] - pub limits: BucketLimits, - /// The configuration for paginated query. - #[serde(default)] - pub pagination: PaginationConfig, -} - -/// Execute messages -#[cw_ownable_execute] -#[cw_serde] -pub enum ExecuteMsg { - /// # StoreObject - /// StoreObject store an object to the bucket. - /// The object is referenced by the hash of its content and this value is returned. - /// If the object is already stored, it is a no-op. It may be pinned though. - /// - /// The "pin" parameter specifies whether the object should be pinned for the sender. Pinning an - /// object ensures it is protected from being removed from storage, making it persistent and - /// guaranteeing its indefinite accessibility. It’s important to note that pinning is optional; - /// objects can be stored without pinning. However, be aware that non-pinned objects can be removed - /// from the storage by anyone at any time, making them no longer accessible. - /// - /// The object will be compressed using the bucket's compression algorithm as specified - /// in the bucket configuration. 
- StoreObject { - /// The content of the object to store. - data: Binary, - /// Specifies whether the object should be pinned for the sender. - /// Pinning ensures the object remains persistent and cannot be removed from storage by anyone. - pin: bool, - }, - - /// # ForgetObject - /// ForgetObject first unpins the object from the bucket for the sender, then removes - /// it from storage if it is no longer pinned by anyone. - /// If the object is still pinned by other senders, it is not removed from storage and an error is returned. - /// If the object is not pinned for the sender, this operation is a no-op. - ForgetObject { id: ObjectId }, - - /// # PinObject - /// PinObject pins the object in the bucket for the sender. If the object is already pinned - /// for the sender, this operation is a no-op. - /// While an object is pinned, it cannot be removed from storage. - PinObject { id: ObjectId }, - - /// # UnpinObject - /// UnpinObject unpins the object in the bucket for the sender. If the object is not pinned - /// for the sender, this operation is a no-op. - /// The object can be removed from storage if it is no longer pinned by anyone. - UnpinObject { id: ObjectId }, -} - -/// Query messages -#[cw_ownable_query] -#[cw_serde] -#[derive(QueryResponses)] -pub enum QueryMsg { - /// # Bucket - /// Bucket returns the bucket information. - #[returns(BucketResponse)] - Bucket {}, - - /// # Object - /// Object returns the object information with the given id. - #[returns(ObjectResponse)] - Object { - /// The id of the object to get. - id: ObjectId, - }, - - /// # Objects - /// Objects returns the list of objects in the bucket with support for pagination. - #[returns(ObjectsResponse)] - Objects { - /// The point in the sequence to start returning objects. - after: Option, - /// The number of objects to return. - first: Option, - }, - - /// # ObjectData - /// ObjectData returns the content of the object with the given id. 
- #[returns(Binary)] - ObjectData { - /// The id of the object to get. - id: ObjectId, - }, - - /// # ObjectsPinnedBy - /// ObjectsPinnedBy returns the list of objects pinned by the given address with support for pagination. - #[returns(ObjectsResponse)] - ObjectsPinnedBy { - /// The address whose pinned objects should be listed. - address: String, - /// The point in the sequence to start returning pinned objects. - after: Option, - /// The number of objects to return. - first: Option, - }, - - /// # PinsForObject - /// PinsForObject returns the list of addresses that pinned the object with the given id with - /// support for pagination. - #[returns(PinsForObjectResponse)] - PinsForObject { - /// The id of the object for which to list all pinning addresses. - object_id: ObjectId, - /// The point in the sequence to start returning pins. - after: Option, - /// The number of pins to return. - first: Option, - }, -} - -/// # PageInfo -/// PageInfo is the page information returned for paginated queries. -#[cw_serde] -pub struct PageInfo { - /// Tells if there is a next page. - pub has_next_page: bool, - /// The cursor to the next page. - pub cursor: Cursor, -} - -/// # BucketResponse -/// BucketResponse is the response of the Bucket query. -#[cw_serde] -pub struct BucketResponse { - /// The name of the bucket. - pub name: String, - /// The configuration of the bucket. - pub config: BucketConfig, - /// The limits of the bucket. - pub limits: BucketLimits, - /// The configuration for paginated query. - pub pagination: PaginationConfig, - /// The statistics of the bucket. - pub stat: BucketStat, -} - -/// CompressionAlgorithm is an enumeration that defines the different compression algorithms -/// supported for compressing the content of objects. -/// The compression algorithm specified here are relevant algorithms for compressing data on-chain, -/// which means that they are fast to compress and decompress, and have a low computational cost. 
-/// -/// The order of the compression algorithms is based on their estimated computational cost (quite opinionated) -/// during both compression and decompression, ranging from the lowest to the highest. This particular -/// order is utilized to establish the default compression algorithm for storing an object. -#[cw_serde] -#[derive(Copy, Default, Eq)] -pub enum CompressionAlgorithm { - /// # Passthrough - /// Represents no compression algorithm. - /// The object is stored as is without any compression. - #[default] - Passthrough, - /// # Snappy - /// Represents the Snappy algorithm. - /// Snappy is a compression/decompression algorithm that does not aim for maximum compression. Instead, it aims for very high speeds and reasonable - /// compression. - /// - /// See [the snappy web page](https://google.github.io/snappy/) for more information. - Snappy, - /// # Lzma - /// Represents the LZMA algorithm. - /// LZMA is a lossless data compression/decompression algorithm that features a high compression ratio and a variable compression-dictionary size up to 4 GB. - /// - /// See [the LZMA wiki page](https://en.wikipedia.org/wiki/Lempel%E2%80%93Ziv%E2%80%93Markov_chain_algorithm) for more information. - Lzma, -} - -/// HashAlgorithm is an enumeration that defines the different hash algorithms -/// supported for hashing the content of objects. -#[cw_serde] -#[derive(Copy, Default, Eq)] -pub enum HashAlgorithm { - /// # MD5 - /// Represents the MD5 algorithm. - /// MD5 is a widely used cryptographic hash function that produces a 128-bit hash value. - /// The computational cost of MD5 is relatively low compared to other hash functions, but its short hash length - /// makes it easier to find hash collisions. It is now considered insecure for cryptographic purposes, but can still - /// used in non-security contexts. - /// - /// MD5 hashes are stored on-chain as 32 hexadecimal characters. 
- /// - /// See [the MD5 Wikipedia page](https://en.wikipedia.org/wiki/MD5) for more information. - MD5, - - /// # Sha224 - /// Represents the SHA-224 algorithm. - /// SHA-224 is a variant of the SHA-2 family of hash functions that produces a 224-bit hash value. - /// It is similar to SHA-256, but with a shorter output size. - /// The computational cost of SHA-224 is moderate, and its relatively short hash length makes it easier to store - /// and transmit. - /// - /// SHA-224 hashes are stored on-chain as 56 hexadecimal characters. - /// - /// See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - Sha224, - - /// # SHA256 - /// Represents the SHA-256 algorithm. - /// SHA-256 is a member of the SHA-2 family of hash functions that produces a 256-bit hash value. - /// It is widely used in cryptography and other security-related applications. - /// The computational cost of SHA-256 is moderate, and its hash length strikes a good balance between security - /// and convenience. - /// - /// SHA-256 hashes are stored on-chain as 64 hexadecimal characters. - /// - /// See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - #[default] - Sha256, - - /// # SHA384 - /// Represents the SHA-384 algorithm. - /// SHA-384 is a variant of the SHA-2 family of hash functions that produces a 384-bit hash value. - /// It is similar to SHA-512, but with a shorter output size. - /// The computational cost of SHA-384 is relatively high, but its longer hash length provides better security - /// against hash collisions. - /// - /// SHA-384 hashes are stored on-chain as 96 hexadecimal characters. - /// - /// See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - Sha384, - - /// # SHA512 - /// Represents the SHA-512 algorithm. - /// SHA-512 is a member of the SHA-2 family of hash functions that produces a 512-bit hash value. 
- /// It is widely used in cryptography and other security-related applications. - /// The computational cost of SHA-512 is relatively high, but its longer hash length provides better security - /// against hash collisions. - /// - /// SHA-512 hashes are stored on-chain as 128 hexadecimal characters. - /// - /// See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - Sha512, -} - -/// BucketConfig is the type of the configuration of a bucket. -/// -/// The configuration is set at the instantiation of the bucket, and is immutable and cannot be changed. -/// The configuration is optional and if not set, the default configuration is used. -#[cw_serde] -#[derive(Builder, Default)] -#[builder(default, setter(into, strip_option))] -pub struct BucketConfig { - /// The algorithm used to hash the content of the objects to generate the id of the objects. - /// The algorithm is optional and if not set, the default algorithm is used. - /// - /// The default algorithm is Sha256 if not set. - #[serde(default)] - pub hash_algorithm: HashAlgorithm, - /// The compression algorithm used for all objects in the bucket. - /// All objects stored in the bucket will use this compression algorithm. - /// The default algorithm is Passthrough if not set. - #[serde(default)] - pub compression_algorithm: CompressionAlgorithm, -} - -/// BucketLimits is the type of the limits of a bucket. -/// -/// The limits are optional and if not set, there is no limit. -#[cw_serde] -#[derive(Builder, Default)] -#[builder(default, setter(into, strip_option))] -pub struct BucketLimits { - /// The maximum total size of the objects in the bucket. - pub max_total_size: Option, - /// The maximum number of objects in the bucket. - pub max_objects: Option, - /// The maximum size of the objects in the bucket. - pub max_object_size: Option, - /// The maximum number of pins in the bucket for an object. 
- pub max_object_pins: Option, -} - -/// PaginationConfig is the type carrying configuration for paginated queries. -/// -/// The fields are optional and if not set, there is a default configuration. -#[cw_serde] -#[derive(Builder)] -#[builder(default, setter(strip_option))] -pub struct PaginationConfig { - /// The maximum elements a page can contain. - /// - /// Shall be less than `u32::MAX - 1`. - /// Default to '30' if not set. - #[serde(default = "PaginationConfig::default_page_max_size")] - pub max_page_size: u32, - /// The default number of elements in a page. - /// - /// Shall be less or equal than `max_page_size`. - /// Default to '10' if not set. - #[serde(default = "PaginationConfig::default_page_default_size")] - pub default_page_size: u32, -} - -impl PaginationConfig { - const fn default_page_max_size() -> u32 { - 30 - } - const fn default_page_default_size() -> u32 { - 10 - } -} - -impl Default for PaginationConfig { - fn default() -> Self { - PaginationConfig { - max_page_size: Self::default_page_max_size(), - default_page_size: Self::default_page_default_size(), - } - } -} - -/// # BucketStat -/// -/// BucketStat is the type of the statistics of a bucket. -#[cw_serde] -#[derive(Builder, Default)] -pub struct BucketStat { - /// The total size of the objects contained in the bucket. - pub size: Uint128, - /// The total size of the objects contained in the bucket after compression. - pub compressed_size: Uint128, - /// The number of objects in the bucket. - pub object_count: Uint128, -} - -/// # ObjectResponse -/// ObjectResponse is the response of the Object query. -#[cw_serde] -pub struct ObjectResponse { - /// The id of the object. - pub id: ObjectId, - /// Tells if the object is pinned by at least one address. - pub is_pinned: bool, - /// The size of the object. - pub size: Uint128, - /// The size of the object when compressed. If the object is not compressed, the value is the - /// same as `size`. 
- pub compressed_size: Uint128, -} - -/// # ObjectsResponse -/// ObjectsResponse is the response of the Objects query. -#[cw_serde] -pub struct ObjectsResponse { - /// The list of objects in the bucket. - pub data: Vec, - /// The page information. - pub page_info: PageInfo, -} - -/// # PinsForObjectResponse -/// PinsForObjectResponse is the response of the GetObjectPins query. -#[cw_serde] -pub struct PinsForObjectResponse { - /// The list of addresses that pinned the object. - pub data: Vec, - /// The page information. - pub page_info: PageInfo, -} - -#[cfg(test)] -mod tests { - use crate::msg::CompressionAlgorithm::Passthrough; - use crate::msg::HashAlgorithm::Sha256; - use crate::msg::{BucketConfig, BucketLimits, InstantiateMsg, PaginationConfig}; - use schemars::_serde_json; - - #[test] - fn pagination_config_default_deserialization() { - let json = r#" - {} - "#; - - let page: PaginationConfig = _serde_json::from_str(json).unwrap(); - assert_eq!(page.max_page_size, 30); - assert_eq!(page.default_page_size, 10); - } - - #[test] - fn bucket_config_default_deserialization() { - let json = r#" - {} - "#; - - let config: BucketConfig = _serde_json::from_str(json).unwrap(); - assert_eq!(config.hash_algorithm, Sha256); - assert_eq!(config.compression_algorithm, Passthrough); - } - - #[test] - fn bucket_limit_default_deserialization() { - let json = r#" - {} - "#; - - let limits: BucketLimits = _serde_json::from_str(json).unwrap(); - assert_eq!(limits.max_object_pins, None); - assert_eq!(limits.max_objects, None); - assert_eq!(limits.max_object_size, None); - assert_eq!(limits.max_total_size, None); - } - - #[test] - fn instantiate_default_deserialization() { - let json = r#" - { - "bucket": "foo" - } - "#; - let msg: InstantiateMsg = _serde_json::from_str(json).unwrap(); - - assert_eq!(msg.pagination.max_page_size, 30); - assert_eq!(msg.pagination.default_page_size, 10); - assert_eq!(msg.config.hash_algorithm, Sha256); - assert_eq!(msg.config.compression_algorithm, 
Passthrough); - assert_eq!(msg.limits.max_object_pins, None); - assert_eq!(msg.limits.max_objects, None); - assert_eq!(msg.limits.max_object_size, None); - assert_eq!(msg.limits.max_total_size, None); - } -} diff --git a/contracts/axone-objectarium/src/pagination.rs b/contracts/axone-objectarium/src/pagination.rs deleted file mode 100644 index 00aeda94..00000000 --- a/contracts/axone-objectarium/src/pagination.rs +++ /dev/null @@ -1,347 +0,0 @@ -use crate::cursor::AsCursor; -use crate::msg::{Cursor, PageInfo}; -use crate::state::Pagination; -use cosmwasm_std::{StdError, StdResult}; -use cw_storage_plus::{Bound, PrimaryKey}; -use serde::de::DeserializeOwned; -use serde::Serialize; -use std::marker::PhantomData; - -#[derive(Clone)] -pub struct PaginationHandler<'a, T, PK> -where - T: Serialize + DeserializeOwned, - PK: PrimaryKey<'a>, -{ - max_page_size: u32, - default_page_size: u32, - - _data_type: PhantomData, - _pk_type: PhantomData, - _lifetime: PhantomData<&'a ()>, -} - -impl<'a, T, PK> From for PaginationHandler<'a, T, PK> -where - T: Serialize + DeserializeOwned, - PK: PrimaryKey<'a>, -{ - fn from(value: Pagination) -> Self { - PaginationHandler::new(value.max_page_size, value.default_page_size) - } -} - -pub trait QueryPage<'a, T, PK> { - fn query_page( - self, - iter_fn: I, - after: Option, - first: Option, - ) -> StdResult<(Vec, PageInfo)> - where - I: FnOnce(Option>) -> Box> + 'a>; -} - -impl<'a, T, PK> QueryPage<'a, T, PK> for PaginationHandler<'a, T, PK> -where - T: Serialize + DeserializeOwned + AsCursor + Clone, - PK: PrimaryKey<'a>, -{ - fn query_page( - self, - iter_fn: I, - after: Option, - first: Option, - ) -> StdResult<(Vec, PageInfo)> - where - I: FnOnce(Option>) -> Box> + 'a>, - { - self.query_page_cursor_fn( - iter_fn, - |c| T::decode_cursor(c), - AsCursor::encode_cursor, - after, - first, - ) - } -} - -impl<'a, T, PK> PaginationHandler<'a, T, PK> -where - T: Serialize + DeserializeOwned, - PK: PrimaryKey<'a>, -{ - pub const fn 
new(max_page_size: u32, default_page_size: u32) -> Self { - PaginationHandler { - max_page_size, - default_page_size, - _data_type: PhantomData, - _pk_type: PhantomData, - _lifetime: PhantomData, - } - } - - fn query_page_cursor_fn( - self, - iter_fn: I, - cursor_dec_fn: CD, - cursor_enc_fn: CE, - after: Option, - first: Option, - ) -> StdResult<(Vec, PageInfo)> - where - I: FnOnce(Option>) -> Box> + 'a>, - CD: FnOnce(Cursor) -> StdResult, - CE: FnOnce(&T) -> StdResult, - { - let min_bound = match after { - Some(cursor) => Some(Bound::exclusive(cursor_dec_fn(cursor)?)), - _ => None, - }; - let page_size = self.compute_page_size(first)?; - let mut items: Vec = iter_fn(min_bound) - .take(page_size + 1) - .map(|res: StdResult<(PK, T)>| res.map(|(_, item)| item)) - .collect::>>()?; - - let has_next_page = items.len() > page_size; - if has_next_page { - items.pop(); - } - - let cursor = items - .last() - .map(cursor_enc_fn) - .transpose()? - .unwrap_or_default(); - - Ok(( - items, - PageInfo { - has_next_page, - cursor, - }, - )) - } - - fn compute_page_size(self, first: Option) -> StdResult { - match first { - Some(req) => { - if req > self.max_page_size { - return Err(StdError::generic_err( - "Requested page size exceed maximum allowed", - )); - } - Ok(req) - } - _ => Ok(self.default_page_size), - } - .map(|size| size as usize) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::slice::Iter; - - struct TestIter<'a> { - sub_iter: Iter<'a, i32>, - shall_err: bool, - } - - impl<'a> TestIter<'a> { - fn map_to_result(&mut self, val: i32) -> StdResult<(i32, i32)> { - if self.shall_err { - return Err(StdError::generic_err("iter error".to_string())); - } - Ok((val, val)) - } - } - - impl<'a> Iterator for TestIter<'a> { - type Item = StdResult<(i32, i32)>; - - fn next(&mut self) -> Option { - match self.sub_iter.next() { - Some(&x) => Some(self.map_to_result(x)), - _ => None, - } - } - } - - #[test] - fn query_page() { - let data = &[1, 2, 3, 4, 5]; - let handler: 
PaginationHandler = Pagination { - max_page_size: 3, - default_page_size: 2, - } - .into(); - - let iter_fn = |min_bound: Option>| match min_bound { - Some(Bound::Exclusive((b, ..))) => Box::new(TestIter { - sub_iter: data[b as usize..].iter(), - shall_err: false, - }) - as Box>>, - _ => Box::new(TestIter { - sub_iter: data.iter(), - shall_err: false, - }), - }; - let cursor_dec_fn = - |cursor: Cursor| cursor.parse::().map_err(|_| StdError::generic_err("")); - let cursor_enc_fn = |pk: &i32| Ok(pk.to_string()); - - let res = handler - .clone() - .query_page_cursor_fn( - |_: Option>| { - Box::new(TestIter { - sub_iter: (&[] as &[i32]).iter(), - shall_err: true, - }) - }, - cursor_dec_fn, - cursor_enc_fn, - None, - None, - ) - .unwrap(); - assert_eq!(res.0, Vec::::new()); - assert_eq!( - res.1, - PageInfo { - has_next_page: false, - cursor: "".to_string(), - } - ); - - let cases = vec![ - ( - None, - None, - vec![1, 2], - PageInfo { - has_next_page: true, - cursor: "2".to_string(), - }, - ), - ( - None, - Some(1), - vec![1], - PageInfo { - has_next_page: true, - cursor: "1".to_string(), - }, - ), - ( - None, - Some(3), - vec![1, 2, 3], - PageInfo { - has_next_page: true, - cursor: "3".to_string(), - }, - ), - ( - Some("1".to_string()), - None, - vec![2, 3], - PageInfo { - has_next_page: true, - cursor: "3".to_string(), - }, - ), - ( - Some("2".to_string()), - Some(3), - vec![3, 4, 5], - PageInfo { - has_next_page: false, - cursor: "5".to_string(), - }, - ), - ( - Some("3".to_string()), - Some(3), - vec![4, 5], - PageInfo { - has_next_page: false, - cursor: "5".to_string(), - }, - ), - ]; - - for case in cases { - let res = handler - .clone() - .query_page_cursor_fn(iter_fn, cursor_dec_fn, cursor_enc_fn, case.0, case.1) - .unwrap(); - assert_eq!(res.0, case.2); - assert_eq!(res.1, case.3); - } - } - - #[test] - fn query_page_err() { - let data = &[1, 2, 3, 4, 5]; - let handler: PaginationHandler = Pagination { - max_page_size: 3, - default_page_size: 2, - } - .into(); 
- - let iter_fn = |_: Option>| { - Box::new(TestIter { - sub_iter: data.iter(), - shall_err: false, - }) as Box>> - }; - let cursor_dec_fn = - |cursor: Cursor| cursor.parse::().map_err(|_| StdError::generic_err("")); - let cursor_enc_fn = |pk: &i32| Ok(pk.to_string()); - - let res = handler.clone().query_page_cursor_fn( - |_: Option>| { - Box::new(TestIter { - sub_iter: data.iter(), - shall_err: true, - }) - }, - cursor_dec_fn, - cursor_enc_fn, - None, - None, - ); - assert_eq!(res, Err(StdError::generic_err("iter error".to_string()))); - - let res = handler.clone().query_page_cursor_fn( - iter_fn, - cursor_dec_fn, - cursor_enc_fn, - None, - Some(4), - ); - assert_eq!( - res, - Err(StdError::generic_err( - "Requested page size exceed maximum allowed".to_string() - )) - ); - - let res = handler.clone().query_page_cursor_fn( - iter_fn, - |_| Err(StdError::generic_err("cursor decode error")), - cursor_enc_fn, - Some("1".to_string()), - None, - ); - assert_eq!( - res, - Err(StdError::generic_err("cursor decode error".to_string())) - ); - } -} diff --git a/contracts/axone-objectarium/src/state.rs b/contracts/axone-objectarium/src/state.rs deleted file mode 100644 index a59b6de7..00000000 --- a/contracts/axone-objectarium/src/state.rs +++ /dev/null @@ -1,367 +0,0 @@ -use crate::compress::CompressionAlgorithm; -use crate::crypto::Hash; -use crate::error::BucketError; -use crate::error::BucketError::EmptyName; -use crate::msg; -use crate::msg::{ObjectResponse, PaginationConfig}; -use cosmwasm_std::{ensure, ensure_ne, Addr, StdError, StdResult, Uint128}; -use cw_storage_plus::{Index, IndexList, IndexedMap, Item, Map, MultiIndex}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -pub const DATA: Map> = Map::new("DATA"); - -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub struct Bucket { - /// The name of the bucket. - pub name: String, - /// The configuration for the bucket. 
- pub config: BucketConfig, - /// The limits of the bucket. - pub limits: BucketLimits, - /// The configuration for paginated query. - pub pagination: Pagination, - /// Some information on the current bucket usage. - pub stat: BucketStat, -} - -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub struct BucketStat { - /// The total size of the objects contained in the bucket. - pub size: Uint128, - /// The total size of the objects contained in the bucket after compression. - pub compressed_size: Uint128, - /// The number of objects in the bucket. - pub object_count: Uint128, -} - -impl Bucket { - pub fn try_new( - name: String, - config: BucketConfig, - limits: BucketLimits, - pagination: Pagination, - ) -> Result { - let n: String = name.split_whitespace().collect(); - ensure!(!n.is_empty(), EmptyName); - - Ok(Self { - name: n, - config, - limits, - pagination, - stat: BucketStat { - size: Uint128::zero(), - compressed_size: Uint128::zero(), - object_count: Uint128::zero(), - }, - }) - } -} - -/// HashAlgorithm is an enumeration that defines the different hash algorithms -/// supported for hashing the content of objects. -#[derive(Clone, Copy, Debug, Default, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub enum HashAlgorithm { - /// Represents the MD5 algorithm. - MD5, - /// Represents the SHA-224 algorithm. - Sha224, - /// Represents the SHA-256 algorithm. - #[default] - Sha256, - /// Represents the SHA-384 algorithm. - Sha384, - /// Represents the SHA-512 algorithm. 
- Sha512, -} - -impl From for HashAlgorithm { - fn from(algorithm: msg::HashAlgorithm) -> Self { - match algorithm { - msg::HashAlgorithm::MD5 => HashAlgorithm::MD5, - msg::HashAlgorithm::Sha224 => HashAlgorithm::Sha224, - msg::HashAlgorithm::Sha256 => HashAlgorithm::Sha256, - msg::HashAlgorithm::Sha384 => HashAlgorithm::Sha384, - msg::HashAlgorithm::Sha512 => HashAlgorithm::Sha512, - } - } -} - -impl From for msg::HashAlgorithm { - fn from(algorithm: HashAlgorithm) -> Self { - match algorithm { - HashAlgorithm::MD5 => msg::HashAlgorithm::MD5, - HashAlgorithm::Sha224 => msg::HashAlgorithm::Sha224, - HashAlgorithm::Sha256 => msg::HashAlgorithm::Sha256, - HashAlgorithm::Sha384 => msg::HashAlgorithm::Sha384, - HashAlgorithm::Sha512 => msg::HashAlgorithm::Sha512, - } - } -} - -impl From for CompressionAlgorithm { - fn from(algorithm: msg::CompressionAlgorithm) -> Self { - match algorithm { - msg::CompressionAlgorithm::Passthrough => CompressionAlgorithm::Passthrough, - msg::CompressionAlgorithm::Snappy => CompressionAlgorithm::Snappy, - msg::CompressionAlgorithm::Lzma => CompressionAlgorithm::Lzma, - } - } -} - -impl From for msg::CompressionAlgorithm { - fn from(algorithm: CompressionAlgorithm) -> Self { - match algorithm { - CompressionAlgorithm::Passthrough => msg::CompressionAlgorithm::Passthrough, - CompressionAlgorithm::Snappy => msg::CompressionAlgorithm::Snappy, - CompressionAlgorithm::Lzma => msg::CompressionAlgorithm::Lzma, - } - } -} - -/// BucketConfig is the type of the configuration of a bucket. -/// -/// The configuration is set at the instantiation of the bucket, and is immutable and cannot be changed. -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub struct BucketConfig { - /// The algorithm used to hash the content of the objects to generate the id of the objects. - /// The algorithm is optional and if not set, the default algorithm is used. - /// - /// The default algorithm is Sha256. 
- pub hash_algorithm: HashAlgorithm, - /// The compression algorithm used for all objects in the bucket. - /// All objects stored in the bucket will use this compression algorithm. - /// The default algorithm is Passthrough. - pub compression_algorithm: CompressionAlgorithm, -} - -impl BucketConfig { - fn new( - hash_algorithm: HashAlgorithm, - compression_algorithm: CompressionAlgorithm, - ) -> BucketConfig { - BucketConfig { - hash_algorithm, - compression_algorithm, - } - } -} - -impl From for BucketConfig { - fn from(config: msg::BucketConfig) -> BucketConfig { - BucketConfig::new( - config.hash_algorithm.into(), - config.compression_algorithm.into(), - ) - } -} - -impl From for msg::BucketConfig { - fn from(config: BucketConfig) -> Self { - msg::BucketConfig { - hash_algorithm: config.hash_algorithm.into(), - compression_algorithm: config.compression_algorithm.into(), - } - } -} - -/// BucketLimits is the type of the limits of a bucket. -/// -/// The limits are optional and if not set, there is no limit. -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub struct BucketLimits { - /// The maximum total size of the objects in the bucket. - pub max_total_size: Option, - /// The maximum number of objects in the bucket. - pub max_objects: Option, - /// The maximum size of the objects in the bucket. - pub max_object_size: Option, - /// The maximum number of pins in the bucket for an object. 
- pub max_object_pins: Option, -} - -impl From for msg::BucketLimits { - fn from(limits: BucketLimits) -> Self { - msg::BucketLimits { - max_total_size: limits.max_total_size, - max_objects: limits.max_objects, - max_object_size: limits.max_object_size, - max_object_pins: limits.max_object_pins, - } - } -} - -impl From for msg::BucketStat { - fn from(stat: BucketStat) -> Self { - msg::BucketStat { - size: stat.size, - compressed_size: stat.compressed_size, - object_count: stat.object_count, - } - } -} -impl BucketLimits { - fn try_new( - max_total_size: Option, - max_objects: Option, - max_object_size: Option, - max_object_pins: Option, - ) -> StdResult { - ensure_ne!( - max_total_size, - Some(Uint128::zero()), - StdError::generic_err("'max_total_size' cannot be zero") - ); - ensure_ne!( - max_objects, - Some(Uint128::zero()), - StdError::generic_err("'max_objects' cannot be zero") - ); - ensure_ne!( - max_object_size, - Some(Uint128::zero()), - StdError::generic_err("'max_object_size' cannot be zero") - ); - ensure!( - !matches!( - (max_total_size, max_object_size), - (Some(max_total_size), Some(max_object_size)) if max_total_size < max_object_size - ), - StdError::generic_err("'max_total_size' cannot be less than 'max_object_size'") - ); - - Ok(BucketLimits { - max_total_size, - max_objects, - max_object_size, - max_object_pins, - }) - } -} - -impl TryFrom for BucketLimits { - type Error = StdError; - - fn try_from(limits: msg::BucketLimits) -> StdResult { - BucketLimits::try_new( - limits.max_total_size, - limits.max_objects, - limits.max_object_size, - limits.max_object_pins, - ) - } -} -/// Pagination is the type carrying configuration for paginated queries. -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub struct Pagination { - /// The maximum elements a page can contain. - pub max_page_size: u32, - /// The default number of elements in a page. 
- pub default_page_size: u32, -} - -const MAX_PAGE_MAX_SIZE: u32 = u32::MAX - 1; - -impl Pagination { - fn try_new(max_page_size: u32, default_page_size: u32) -> StdResult { - ensure!( - max_page_size <= MAX_PAGE_MAX_SIZE, - StdError::generic_err("'max_page_size' cannot exceed 'u32::MAX - 1'") - ); - ensure_ne!( - default_page_size, - 0, - StdError::generic_err("'default_page_size' cannot be zero") - ); - ensure!( - default_page_size <= max_page_size, - StdError::generic_err("'default_page_size' cannot exceed 'max_page_size'") - ); - - Ok(Pagination { - max_page_size, - default_page_size, - }) - } -} - -impl From for PaginationConfig { - fn from(value: Pagination) -> Self { - PaginationConfig { - max_page_size: value.max_page_size, - default_page_size: value.default_page_size, - } - } -} - -impl TryFrom for Pagination { - type Error = StdError; - - fn try_from(value: PaginationConfig) -> StdResult { - Pagination::try_new(value.max_page_size, value.default_page_size) - } -} - -pub const BUCKET: Item = Item::new("bucket"); - -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -pub struct Object { - /// The id of the object. - pub id: Hash, - /// The size of the object. - pub size: Uint128, - /// The number of pin on this object. - pub pin_count: Uint128, - /// The compression algorithm used to compress the object. - pub compression: CompressionAlgorithm, - /// The size of the object after compression. - pub compressed_size: Uint128, -} - -impl From<&Object> for ObjectResponse { - fn from(object: &Object) -> Self { - ObjectResponse { - id: object.id.to_string(), - size: object.size, - is_pinned: object.pin_count > Uint128::zero(), - compressed_size: object.compressed_size, - } - } -} - -pub const OBJECT: Map = Map::new("OBJECT"); - -#[derive(Clone, Deserialize, Serialize)] -pub struct Pin { - /// The id of the object. - pub id: Hash, - /// The address that pinned the object. 
- pub address: Addr, -} - -/// The primary key for a pin is a tuple of the object id and the address that pinned it. -pub type PinPK = (Hash, Addr); - -pub struct PinIndexes<'a> { - pub object: MultiIndex<'a, Hash, Pin, PinPK>, - pub address: MultiIndex<'a, Addr, Pin, PinPK>, -} - -impl IndexList for PinIndexes<'_> { - fn get_indexes(&'_ self) -> Box> + '_> { - let object: &dyn Index = &self.object; - let address: &dyn Index = &self.address; - Box::new(vec![object, address].into_iter()) - } -} - -pub fn pins<'a>() -> IndexedMap> { - IndexedMap::new( - "PIN", - PinIndexes { - object: MultiIndex::new(|_, p| p.id.clone(), "PIN", "PIN__OBJECT"), - address: MultiIndex::new(|_, p| p.address.clone(), "PIN", "PIN__ADDRESS"), - }, - ) -} diff --git a/docs/axone-cognitarium.md b/docs/axone-cognitarium.md deleted file mode 100644 index d68d1810..00000000 --- a/docs/axone-cognitarium.md +++ /dev/null @@ -1,928 +0,0 @@ -# Cognitarium - -A [CosmWasm](https://cosmwasm.com/) Smart Contract which enables the storage and querying of Semantic data using [RDF (Resource Description Framework)](https://en.wikipedia.org/wiki/Resource_Description_Framework), which represents information as semantic triples. - -## Purpose - -The Smart Contract operates as a [semantic database](https://en.wikipedia.org/wiki/Triplestore), adept at storing and fetching [RDF triples](https://en.wikipedia.org/wiki/Semantic_triple) via semantic queries. It can be deployed on any blockchain within the [Cosmos blockchains](https://cosmos.network/) network, utilizing the [CosmWasm](https://cosmwasm.com/) framework. - -The key features of the contract include: - -**Insertion of RDF Triples:** -This functionality enables the insertion of new data in the form of [RDF triples](https://en.wikipedia.org/wiki/Semantic_triple) onto the blockchain, ensuring secure and tamper-proof storage. 
The Smart Contract supports inserting these triples in various serialization formats including [RDF/XML](https://en.wikipedia.org/wiki/RDF/XML), [Turtle](https://www.w3.org/TR/turtle/), [N-Triples](https://www.w3.org/TR/n-triples/) and [N-Quads](https://www.w3.org/TR/n-quads/). - -**Removal of RDF Triples:** -This functionality enables the selective deletion of RDF triples from the on-chain store. Users can specify patterns or criteria that identify the triples to be removed, ensuring precise and targeted removal of data. - -**Querying of RDF Triples:** -The Smart Contract provides powerful on-chain querying capabilities, allowing users to retrieve specific RDF triples stored on the blockchain. This is done using a variation of [SPARQL](https://www.w3.org/TR/sparql11-query/), a specialized query language designed for retrieving and manipulating data stored in RDF format. Users can specify their search criteria in the query, and the Smart Contract will return the matching RDF triples, directly accessing the on-chain data. This feature supports various serialization formats for the output, such as Turtle, N-Triples, N-Quads, and RDF/XML, offering flexibility in how the retrieved data is presented and used. - -**Policies of the Store:** -The Smart Contract includes a straightforward yet effective policies functionality to manage the capacity and usage of the on-chain storage effectively. These policies ensure efficient operation and prevent misuse or overuse of the Smart Contract. For instance: - -- Maximum Triples: Caps the total number of RDF triples the store can hold, preventing database overload. -- Storage Size Limit: Sets an upper bound on the store's data size in bytes, managing blockchain resource use. -- Query Size Limit: Restricts the size or complexity of queries to maintain fast and reliable data retrieval. -- Insert Data Limit: Limits the size of data (in bytes) that can be added in a single transaction, ensuring smooth and efficient data insertion. 
- -## Rationale - -The data preserved in the blockchain holds significant value due to its derivation from a distributed consensus, rendering it a reliable source for decision-making, applicable to both on-chain and off-chain scenarios. - -To effectively utilize this data, it's essential to adopt representation models that cater to diverse requirements. The Smart Contract Cognitarium provides such a model, facilitating the depiction of intricate and evolving semantic connections within a highly interconnected dataset. This approach transforms the data into a Knowledge Graph, enabling an accurate portrayal of existing facts and fostering the generation of new insights. - -## Play - -### Model your data with RDF - -[RDF](https://www.w3.org/RDF/) encodes information in triple structures. The basic structure of an RDF triple is `subject-predicate-object`, much like a simple sentence in the English language. - -1. **Subject**: The subject is the entity or resource the statement is about. It's typically a URI ([Uniform Resource Identifier](https://en.wikipedia.org/wiki/Uniform_Resource_Identifier)) which uniquely identifies a resource. -2. **Predicate**: The predicate (also called a property) is a specific aspect, characteristic, attribute, or relation that describes the subject. It's also typically a URI. -3. **Object**: The object is the value of the attribute defined by the predicate for the subject. It can be a URI or a literal (such as a string or a number) and may also include additional information such as a language tag or a datatype. - -In RDF, **prefixes** are used as a shorthand notation for long URIs to make the data more readable and less verbose. They're similar to namespaces in programming languages. For instance, instead of writing `http://www.w3.org/2001/XMLSchema#integer`, you could declare a prefix `xsd` to represent the `http://www.w3.org/2001/XMLSchema#` URI and then use `xsd:integer`. 
- -[Turtle (Terse RDF Triple Language)](https://www.w3.org/TR/turtle/) is a syntax that allows RDF to be completely written in a compact and natural text form, with abbreviations for common usage patterns and datatypes. - -Here's an RDF triple written in Turtle format (`.ttl` file): - -```turtle -@prefix ex: . -@prefix xsd: . - -ex:Alice ex:hasAge "30"^^xsd:integer . -``` - -In this example: - -- **`ex:Alice`** is the subject (using `ex` as a prefix for the `http://example.com/stuff/1.0/` URI). -- **`ex:hasAge`** is the predicate. -- **`"30"^^xsd:integer`** is the object, a literal of datatype integer (using **`xsd`** as a prefix for the XML Schema Datatype namespace). - -In the Turtle syntax, the semicolon (**`;`**) is used as a shorthand to reduce verbosity when multiple predicates and objects have the same subject. It allows you to write multiple predicates and objects for the same subject without having to repeat the subject. -The comma (**`,`**) is used as a shorthand for reducing verbosity when the same subject and predicate have multiple objects. - -Suppose we want to express that Alice is 30 years old person, and her email is `alice@example.com`: - -```turtle -@prefix ex: . -@prefix xsd: . - -ex:Alice a ; - ex:hasAge "30"^^xsd:integer ; - ex:hasEmail "alice@example.com" . -``` - -:::tip -The lowercase "a" is a special abbreviation for the RDF type property, which states that a resource is an instance of a particular class. This is essentially equivalent to **``**, and it's used to indicate the type of a resource. -::: - -The same RDF triple can be expressed in RDF/XML format (`.rdf.xml` file): - -```xml - - - - 30 - alice@example.com - - -``` - -### Instantiate the Smart Contract - -Let's initiate a new instance of Smart Contract and input some RDF triples into it. The `axone-cognitarium` can be set up in the following manner. Please consult the schema for additional details regarding configuration settings. 
- -```bash -axoned tx wasm instantiate $CODE_ID \ - --from $ADDR \ - --label "my-rdf-storage" \ - --admin $ADMIN_ADDR \ - --gas 1000000 \ - '{}' -``` - -:::tip -You can provide some limitation parameters to restrict usage for both execute and query messages. For instance, you can set a maximum number of triples that can be stored in the smart contract, or a maximum size of data that can be inserted in a single transaction. - -The default values are: - -```json -{ - "limits": { - "max_byte_size": "340282366920938463463374607431768211455", - "max_insert_data_byte_size": "340282366920938463463374607431768211455", - "max_insert_data_triple_count": "340282366920938463463374607431768211455", - "max_query_limit": 30, - "max_query_variable_count": 30, - "max_triple_byte_size": "340282366920938463463374607431768211455", - "max_triple_count": "340282366920938463463374607431768211455" - } -} -``` - -::: - -### Insert RDF triples - -To insert RDF triples, you need to send an `InsertData` message through the `cognitarium` smart contract you've already instantiated. For this operation, your inputs should include the data of the triples, encoded in [base64](https://en.wikipedia.org/wiki/Base64), as well as the format. The format options available are: - -- `turtle` (default) -- `rdf_xml` -- `n_triples` -- `n_quads` - -Let's consider the following example of data in Turtle format, contained within a file named `data.ttl`. It describes a small network of people and their relationships, such as name, title, and whom they know. - -```turtle -@prefix : . -@prefix foaf: . -@prefix schema: . - -:alice a foaf:Person ; - foaf:name "Alice" ; - foaf:knows :bob ; - schema:email "alice@example.org" . - -:bob a foaf:Person ; - foaf:name "Bob" ; - foaf:knows :alice, :carol ; - schema:jobTitle "Software Developer" . - -:carol a foaf:Person ; - foaf:name "Carol" ; - schema:jobTitle "Data Scientist" ; - foaf:knows :bob . 
-``` - -You can insert this data into the `cognitarium` smart contract with the following command: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from axone1cu9wzlcyyxpek20jaqfwzu3llzjgx34cqf94yj \ - --gas 10000000 \ - "{\"insert_data\":{\"format\": \"turtle\", \"data\": \"$(cat data.ttl | base64 | tr -d '\n\r')\"}}" -``` - -With the transaction hash we can query the number of triples inserted: - -```bash -axoned query tx $TX_HASH -ojson | - jq -r '.events[] | select(.type == "wasm") | .attributes[] | select(.key == "triple_count") | .value' -``` - -### Query RDF triples - -Now that we've populated the axone-cognitarium with several triples, let's explore how to retrieve this data. We can utilize the Select query message for this purpose. If you're familiar with [SPARQL](https://www.w3.org/TR/rdf-sparql-query/), you'll find the process quite intuitive. - -A `select` query on a `cognitarium` instance enables you to fetch and filter the data. The `select.query` JSON should contain the following: - -- `prefixes` array: to declare a `prefix` and its related `namespace` -- `limit`: the number of elements to return -- `where`: filters and variable declarations -- `select` array: all `variable` names you declared in `where` you want to get - -`where` should be an array of elements specifying triple filterings. You have to specify `subject`, `predicate` and `object` as a `variable`, or, alternatively, a `prefixed` or `full` `named_node`. - -`object` can also be a `simple` `literal`. 
- -The following query will select all the triples `subject`, `predicate` and `object` from the store: - -```json -{ - "select": { - "query": { - "prefixes": [], - "select": [ - { - "variable": "subject" - }, - { - "variable": "predicate" - }, - { - "variable": "object" - } - ], - "where": [ - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "subject" - }, - "predicate": { - "variable": "predicate" - }, - "object": { - "variable": "object" - } - } - } - } - ], - "limit": null - } - } -} -``` - -It's semantically equivalent to the following SPARQL query: - -```sparql -SELECT ?subject ?predicate ?object -WHERE { - ?subject ?predicate ?object -} -``` - -This query can be executed on the cognitarium smart contract using the command below: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - '{"select":{"query":{"prefixes":[],"select":[{"variable":"subject"},{"variable":"predicate"},{"variable":"object"}],"where":[{"simple":{"triple_pattern":{"subject":{"variable":"subject"},"predicate":{"variable":"predicate"},"object":{"variable":"object"}}}}],"limit":null}}}' -``` - -Now, let's try something more interesting. Let's retrieve the names of people and their job titles, but only for those who know at least one other person in the network. This query introduces filtering based on relationships. 
- -Here's the query: - -```json -{ - "select": { - "query": { - "prefixes": [ - { "foaf": "http://xmlns.com/foaf/0.1/" }, - { "schema": "http://schema.org/" } - ], - "select": [ - { - "variable": "personName" - }, - { - "variable": "jobTitle" - } - ], - "where": [ - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "full": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" - } - } - }, - "object": { - "node": { - "named_node": { - "prefixed": "foaf:Person" - } - } - } - } - } - }, - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "prefixed": "foaf:Name" - } - } - }, - "object": { - "variable": "personName" - } - } - } - }, - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "prefixed": "schema:jobTitle" - } - } - }, - "object": { - "variable": "jobTitle" - } - } - } - }, - { - "simple": { - "triple_pattern": { - "subject": { - "variable": "person" - }, - "predicate": { - "node": { - "named_node": { - "prefixed": "foaf:knows" - } - } - }, - "object": { - "variable": "knownPerson" - } - } - } - } - ], - "limit": null - } - } -} -``` - -It's semantically equivalent to the following SPARQL query: - -```sparql -PREFIX foaf: -PREFIX schema: - -SELECT ?personName ?jobTitle -WHERE { - ?person a foaf:Person . - ?person foaf:name ?personName . - ?person schema:jobTitle ?jobTitle . - ?person foaf:knows ?knownPerson . 
-} -``` - -This query can be executed on the cognitarium smart contract using the command below: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - '{"select":{"query":{"prefixes":[{"foaf":"http://xmlns.com/foaf/0.1/"},{"schema":"http://schema.org/"}],"select":[{"variable":"personName"},{"variable":"jobTitle"}],"where":[{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"full":"http://www.w3.org/1999/02/22-rdf-syntax-ns#type"}}},"object":{"node":{"named_node":{"prefixed":"foaf:Person"}}}}}},{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"prefixed":"foaf:Name"}}},"object":{"variable":"personName"}}}},{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"prefixed":"schema:jobTitle"}}},"object":{"variable":"jobTitle"}}}},{"simple":{"triple_pattern":{"subject":{"variable":"person"},"predicate":{"node":{"named_node":{"prefixed":"foaf:knows"}}},"object":{"variable":"knownPerson"}}}}],"limit":null}}}' -``` - -## InstantiateMsg - -Instantiate message - -| parameter | description | -| ------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `limits` | **[StoreLimitsInput](#storelimitsinput)**. Limitations regarding store usage. | -| `limits.max_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes the store can contain. 
The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. Defaults to [Uint128::MAX] if not set, which can be considered as no limit.
**Default:** `"340282366920938463463374607431768211455"` | -| `limits.max_insert_data_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes an insert data query can contain. Defaults to [Uint128::MAX] if not set, which can be considered as no limit.
**Default:** `"340282366920938463463374607431768211455"` | -| `limits.max_insert_data_triple_count` | **[Uint128](#uint128)**. The maximum number of triples an insert data query can contain (after parsing). Defaults to [Uint128::MAX] if not set, which can be considered as no limit.
**Default:** `"340282366920938463463374607431768211455"` | -| `limits.max_query_limit` | **integer**. The maximum limit of a query, i.e. the maximum number of triples returned by a select query. Defaults to 30 if not set.
**Default:** `30` | -| `limits.max_query_variable_count` | **integer**. The maximum number of variables a query can select. Defaults to 30 if not set.
**Default:** `30` | -| `limits.max_triple_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes the store can contain for a single triple. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. The limit is used to prevent storing very large triples, especially literals. Defaults to [Uint128::MAX] if not set, which can be considered as no limit.
**Default:** `"340282366920938463463374607431768211455"` | -| `limits.max_triple_count` | **[Uint128](#uint128)**. The maximum number of triples the store can contain. Defaults to [Uint128::MAX] if not set, which can be considered as no limit.
**Default:** `"340282366920938463463374607431768211455"` | - -## ExecuteMsg - -Execute messages - -### ExecuteMsg::InsertData - -Insert the data as RDF triples in the store. For already existing triples it acts as no-op. - -Only the smart contract owner (i.e. the address who instantiated it) is authorized to perform this action. - -| parameter | description | -| -------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `insert_data` | _(Required.) _ **object**. | -| `insert_data.data` | _(Required.) _ **[Binary](#binary)**. The data to insert. The data must be serialized in the format specified by the `format` field. And the data are subject to the limitations defined by the `limits` specified at contract instantiation. | -| `insert_data.format` | **[DataFormat](#dataformat)\|null**. The data format in which the triples are serialized. If not provided, the default format is [Turtle](https://www.w3.org/TR/turtle/) format. | - -### ExecuteMsg::DeleteData - -Delete the data (RDF triples) from the store matching the patterns defined by the provided query. For non-existing triples it acts as no-op. - -Example: `json { "prefixes": [ { "prefix": "foaf", "namespace": "http://xmlns.com/foaf/0.1/" } ], "delete": [ { "subject": { "variable": "s" }, "predicate": { "variable": "p" }, "object": { "variable": "o" } } ], "where": [ { "simple": { "triplePattern": { "subject": { "variable": "s" }, "predicate": { "namedNode": {"prefixed": "foaf:givenName"} }, "object": { "literal": { "simple": "Myrddin" } } } } }, { "simple": { "triplePattern": { "subject": { "variable": "s" }, "predicate": { "variable": "p" }, "object": { "variable": "o" } } } } ] ` - -Only the smart contract owner (i.e. the address who instantiated it) is authorized to perform this action. 
- -| parameter | description | -| ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `delete_data` | _(Required.) _ **object**. | -| `delete_data.delete` | _(Required.) _ **Array<[TripleDeleteTemplate](#tripledeletetemplate)>**. Specifies the specific triple templates to delete. If nothing is provided and the `where` clause is a single Bgp, the patterns are used for deletion. | -| `delete_data.prefixes` | _(Required.) _ **Array<[Prefix](#prefix)>**. The prefixes used in the operation. | -| `delete_data.where` | **[WhereClause](#whereclause)\|null**. Defines the patterns that data (RDF triples) should match in order for it to be considered for deletion, if any. | - -## QueryMsg - -Query messages - -### QueryMsg::Store - -Returns information about the triple store. - -| parameter | description | -| --------- | -------------------------- | -| `store` | _(Required.) _ **object**. | - -### QueryMsg::Select - -Returns the resources matching the criteria defined by the provided query. - -| parameter | description | -| -------------- | --------------------------------------------------------------------- | -| `select` | _(Required.) _ **object**. | -| `select.query` | _(Required.) _ **[SelectQuery](#selectquery)**. The query to execute. | - -### QueryMsg::Describe - -Returns a description of the resource identified by the provided IRI as a set of RDF triples serialized in the provided format. - -| parameter | description | -| ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `describe` | _(Required.) _ **object**. | -| `describe.format` | **[DataFormat](#dataformat)\|null**. The format in which the triples are serialized. 
If not provided, the default format is [Turtle](https://www.w3.org/TR/turtle/) format. | -| `describe.query` | _(Required.) _ **[DescribeQuery](#describequery)**. The query to execute. | - -### QueryMsg::Construct - -Returns the resources matching the criteria defined by the provided query as a set of RDF triples serialized in the provided format. - -| parameter | description | -| ------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `construct` | _(Required.) _ **object**. | -| `construct.format` | **[DataFormat](#dataformat)\|null**. The format in which the triples are serialized. If not provided, the default format is [Turtle](https://www.w3.org/TR/turtle/) format. | -| `construct.query` | _(Required.) _ **[ConstructQuery](#constructquery)**. The query to execute. | - -## Responses - -### construct - -Represents the response of a [QueryMsg::Construct] query. - -| property | description | -| -------- | ---------------------------------------------------------------------------------- | -| `data` | _(Required.) _ **[Binary](#binary)**. The data serialized in the specified format. | -| `format` | _(Required.) _ **[DataFormat](#dataformat)**. The format of the data. | - -### describe - -Represents the response of a [QueryMsg::Describe] query. - -| property | description | -| -------- | ---------------------------------------------------------------------------------- | -| `data` | _(Required.) _ **[Binary](#binary)**. The data serialized in the specified format. | -| `format` | _(Required.) _ **[DataFormat](#dataformat)**. The format of the data. | - -### select - -Represents the response of a [QueryMsg::Select] query. - -| property | description | -| ------------------ | --------------------------------------------------------------------------------------------------------------- | -| `head` | _(Required.) 
_ **[Head](#head)**. The head of the response, i.e. the set of variables mentioned in the results. | -| `head.vars` | **Array<string>**. The variables selected in the query. | -| `results` | _(Required.) _ **[Results](#results)**. The results of the select query. | -| `results.bindings` | **Array<object>**. The bindings of the results. | - -### store - -Contains information related to triple store. - -| property | description | -| ------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `limits` | _(Required.) _ **[StoreLimits](#storelimits)**. The store limits. | -| `limits.max_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes the store can contain. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. | -| `limits.max_insert_data_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes an insert data query can contain. | -| `limits.max_insert_data_triple_count` | **[Uint128](#uint128)**. The maximum number of triples an insert data query can contain (after parsing). | -| `limits.max_query_limit` | **integer**. The maximum limit of a query, i.e. the maximum number of triples returned by a select query. | -| `limits.max_query_variable_count` | **integer**. The maximum number of variables a query can select. | -| `limits.max_triple_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes the store can contain for a single triple. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. 
The limit is used to prevent storing very large triples, especially literals. | -| `limits.max_triple_count` | **[Uint128](#uint128)**. The maximum number of triples the store can contain. | -| `owner` | _(Required.) _ **string**. The store owner. | -| `stat` | _(Required.) _ **[StoreStat](#storestat)**. The store current usage. | -| `stat.byte_size` | **[Uint128](#uint128)**. The total triple size in the store, in bytes. | -| `stat.namespace_count` | **[Uint128](#uint128)**. The total number of IRI namespace present in the store. | -| `stat.triple_count` | **[Uint128](#uint128)**. The total number of triple present in the store. | - -## Definitions - -### Bgp - -Represents a basic graph pattern expressed as a set of triple patterns. - -| property | description | -| -------------- | ---------------------------------------------------------------- | -| `bgp` | _(Required.) _ **object**. | -| `bgp.patterns` | _(Required.) _ **Array<[TriplePattern](#triplepattern)>**. | - -### Binary - -A string containing Base64-encoded data. - -| type | -| ----------- | -| **string**. | - -### BlankNode - -An RDF [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node). - -| property | description | -| ------------ | -------------------------- | -| `blank_node` | _(Required.) _ **string**. | - -### ConstructQuery - -Represents a CONSTRUCT query over the triple store, allowing to retrieve a set of triples serialized in a specific format. - -| property | description | -| ----------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `construct` | _(Required.) _ **Array<[TripleConstructTemplate](#tripleconstructtemplate)>**. The triples to construct. If nothing is provided and the `where` clause is a single Bgp, the patterns are used for construction. | -| `prefixes` | _(Required.) 
_ **Array<[Prefix](#prefix)>**. The prefixes used in the query. | -| `where` | _(Required.) _ **[WhereClause](#whereclause)**. The WHERE clause. This clause is used to specify the triples to construct using variable bindings. | - -### DataFormat - -Represents the format in which the data are serialized, for example when returned by a query or when inserted in the store. - -| variant | description | -| ----------------------- | --------------------------------------------------------------------------------------------- | -| [RDF XML](#rdf-xml) | **string**: `rdf_xml`. Output in [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) format. | -| [Turtle](#turtle) | **string**: `turtle`. Output in [Turtle](https://www.w3.org/TR/turtle/) format. | -| [N-Triples](#n-triples) | **string**: `n_triples`. Output in [N-Triples](https://www.w3.org/TR/n-triples/) format. | -| [N-Quads](#n-quads) | **string**: `n_quads`. Output in [N-Quads](https://www.w3.org/TR/n-quads/) format. | - -### DescribeQuery - -Represents a DESCRIBE query over the triple store, allowing to retrieve a description of a resource as a set of triples serialized in a specific format. - -| property | description | -| ---------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | -| `prefixes` | _(Required.) _ **Array<[Prefix](#prefix)>**. The prefixes used in the query. | -| `resource` | _(Required.) _ **[VarOrNamedNode](#varornamednode)**. The resource to describe given as a variable or a node. | -| `where` | **[WhereClause](#whereclause)\|null**. The WHERE clause. This clause is used to specify the resource identifier to describe using variable bindings. | - -### Expression - -Represents a logical combination of operations whose evaluation results in a term. 
- -| variant | description | -| --------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| undefined | **object**. A named node constant. | -| undefined | **object**. A literal constant. | -| undefined | **object**. A variable that must be bound for evaluation. | -| undefined | **object**. Logical conjunction of expressions. All expressions must evaluate to true for the conjunction to be true. If the conjunction is empty, it is considered true. | -| undefined | **object**. Logical disjunction of expressions. At least one expression must evaluate to true for the disjunction to be true. If the disjunction is empty, it is considered false. | -| undefined | **object**. Equality comparison. | -| undefined | **object**. Greater than comparison. | -| undefined | **object**. Greater or equal comparison. | -| undefined | **object**. Less than comparison. | -| undefined | **object**. Less or equal comparison. | -| undefined | **object**. Negation of an expression. | - -### Filter - -Filters the inner clause matching the expression. The solutions coming from the inner clause that do not match the expression are discarded. The variables provided in the inner clause are available in the filter expression. - -| property | description | -| -------------- | ---------------------------------------------------------------------------------------------------------- | -| `filter` | _(Required.) _ **object**. | -| `filter.expr` | _(Required.) _ **object\|object\|object\|object\|object\|object\|object\|object\|object\|object\|object**. | -| `filter.inner` | _(Required.) _ **[Bgp](#bgp)\|[LateralJoin](#lateraljoin)\|[Filter](#filter)**. | - -### Full - -A full IRI. - -| property | description | -| -------- | -------------------------- | -| `full` | _(Required.) _ **string**. | - -### Head - -Represents the head of a [SelectResponse]. 
- -| property | description | -| -------- | ---------------------------------------------------------------------------- | -| `vars` | _(Required.) _ **Array<string>**. The variables selected in the query. | - -### IRI - -Represents an IRI. - -| variant | description | -| --------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [Prefixed](#prefixed) | **object**. An IRI prefixed with a prefix. The prefixed IRI is expanded to a full IRI using the prefix definition specified in the query. For example, the prefixed IRI `rdf:type` is expanded to `http://www.w3.org/1999/02/22-rdf-syntax-ns#type`. | -| [Full](#full) | **object**. A full IRI. | - -### LanguageTaggedString - -A [language-tagged string](https://www.w3.org/TR/rdf11-concepts/#dfn-language-tagged-string) - -| property | description | -| --------------------------------- | ------------------------------------------------------------------------------------------------------ | -| `language_tagged_string` | _(Required.) _ **object**. | -| `language_tagged_string.language` | _(Required.) _ **string**. The [language tag](https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag). | -| `language_tagged_string.value` | _(Required.) _ **string**. The [lexical form](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form). | - -### LateralJoin - -Evaluates right for all result row of left - -| property | description | -| -------------------- | ------------------------------------------------------------------------------- | -| `lateral_join` | _(Required.) _ **object**. | -| `lateral_join.left` | _(Required.) _ **[Bgp](#bgp)\|[LateralJoin](#lateraljoin)\|[Filter](#filter)**. | -| `lateral_join.right` | _(Required.) _ **[Bgp](#bgp)\|[LateralJoin](#lateraljoin)\|[Filter](#filter)**. 
| - -### Literal - -An RDF [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal). - -| variant | description | -| --------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | -| [Simple](#simple) | **object**. A [simple literal](https://www.w3.org/TR/rdf11-concepts/#dfn-simple-literal) without datatype or language form. | -| [LanguageTaggedString](#languagetaggedstring) | **object**. A [language-tagged string](https://www.w3.org/TR/rdf11-concepts/#dfn-language-tagged-string) | -| [TypedValue](#typedvalue) | **object**. A value with a datatype. | - -### N-Quads - -Output in [N-Quads](https://www.w3.org/TR/n-quads/) format. - -| literal | -| ----------- | -| `"n_quads"` | - -### N-Triples - -Output in [N-Triples](https://www.w3.org/TR/n-triples/) format. - -| literal | -| ------------- | -| `"n_triples"` | - -### NamedNode - -An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). - -| property | description | -| ------------ | -------------------------------------------------------- | -| `named_node` | _(Required.) _ **[Prefixed](#prefixed)\|[Full](#full)**. | - -### Node - -Represents either an IRI (named node) or a blank node. - -| variant | description | -| ----------------------- | -------------------------------------------------------------------------------------- | -| [NamedNode](#namednode) | **object**. An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). | -| [BlankNode](#blanknode) | **object**. An RDF [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node). | - -### Prefix - -Represents a prefix, i.e. a shortcut for a namespace used in a query. - -| property | description | -| ----------- | -------------------------------------------------------------------- | -| `namespace` | _(Required.) _ **string**. The namespace associated with the prefix. | -| `prefix` | _(Required.) _ **string**. The prefix. 
| - -### Prefixed - -An IRI prefixed with a prefix. The prefixed IRI is expanded to a full IRI using the prefix definition specified in the query. For example, the prefixed IRI `rdf:type` is expanded to `http://www.w3.org/1999/02/22-rdf-syntax-ns#type`. - -| property | description | -| ---------- | -------------------------- | -| `prefixed` | _(Required.) _ **string**. | - -### RDF XML - -Output in [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) format. - -| literal | -| ----------- | -| `"rdf_xml"` | - -### Results - -Represents the results of a [SelectResponse]. - -| property | description | -| ---------- | -------------------------------------------------------------------- | -| `bindings` | _(Required.) _ **Array<object>**. The bindings of the results. | - -### SelectItem - -Represents an item to select in a [SelectQuery]. - -| variant | description | -| --------------------- | ---------------------------------- | -| [Variable](#variable) | **object**. Represents a variable. | - -### SelectQuery - -Represents a SELECT query over the triple store, allowing to select variables to return and to filter the results. - -| property | description | -| ---------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `limit` | **integer\|null**. The maximum number of results to return. If `None`, there is no limit. Note: the value of the limit cannot exceed the maximum query limit defined in the store limitations. | -| `prefixes` | _(Required.) _ **Array<[Prefix](#prefix)>**. The prefixes used in the query. | -| `select` | _(Required.) _ **Array<[SelectItem](#selectitem)>**. The items to select. Note: the number of items to select cannot exceed the maximum query variable count defined in the store limitations. | -| `where` | _(Required.) _ **[WhereClause](#whereclause)**. The WHERE clause. 
If `None`, there is no WHERE clause, i.e. all triples are returned without filtering. | - -### Simple - -A [simple literal](https://www.w3.org/TR/rdf11-concepts/#dfn-simple-literal) without datatype or language form. - -| property | description | -| -------- | -------------------------- | -| `simple` | _(Required.) _ **string**. | - -### StoreLimits - -Contains limitations regarding store usages. - -| property | description | -| ------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `max_byte_size` | _(Required.) _ **[Uint128](#uint128)**. The maximum number of bytes the store can contain. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. | -| `max_insert_data_byte_size` | _(Required.) _ **[Uint128](#uint128)**. The maximum number of bytes an insert data query can contain. | -| `max_insert_data_triple_count` | _(Required.) _ **[Uint128](#uint128)**. The maximum number of triples an insert data query can contain (after parsing). | -| `max_query_limit` | _(Required.) _ **integer**. The maximum limit of a query, i.e. the maximum number of triples returned by a select query. | -| `max_query_variable_count` | _(Required.) _ **integer**. The maximum number of variables a query can select. | -| `max_triple_byte_size` | _(Required.) _ **[Uint128](#uint128)**. The maximum number of bytes the store can contain for a single triple. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. The limit is used to prevent storing very large triples, especially literals. 
| -| `max_triple_count` | _(Required.) _ **[Uint128](#uint128)**. The maximum number of triples the store can contain. | - -### StoreLimitsInput - -Contains requested limitations regarding store usages. - -| property | description | -| ------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `max_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes the store can contain. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_insert_data_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes an insert data query can contain. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_insert_data_triple_count` | **[Uint128](#uint128)**. The maximum number of triples an insert data query can contain (after parsing). Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_query_limit` | **integer**. The maximum limit of a query, i.e. the maximum number of triples returned by a select query. Default to 30 if not set. | -| `max_query_variable_count` | **integer**. The maximum number of variables a query can select. Default to 30 if not set. | -| `max_triple_byte_size` | **[Uint128](#uint128)**. The maximum number of bytes the store can contain for a single triple. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. 
The limit is used to prevent storing very large triples, especially literals. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_triple_count` | **[Uint128](#uint128)**. The maximum number of triples the store can contain. Default to [Uint128::MAX] if not set, which can be considered as no limit. | - -### StoreStat - -Contains usage information about the triple store. - -| property | description | -| ----------------- | ----------------------------------------------------------------------------------------------- | -| `byte_size` | _(Required.) _ **[Uint128](#uint128)**. The total triple size in the store, in bytes. | -| `namespace_count` | _(Required.) _ **[Uint128](#uint128)**. The total number of IRI namespace present in the store. | -| `triple_count` | _(Required.) _ **[Uint128](#uint128)**. The total number of triple present in the store. | - -### TripleConstructTemplate - -Represents a triple template to be forged for a construct query. - -| property | description | -| ----------- | ----------------------------------------------------------------------------------------------- | -| `object` | _(Required.) _ **[VarOrNodeOrLiteral](#varornodeorliteral)**. The object of the triple pattern. | -| `predicate` | _(Required.) _ **[VarOrNamedNode](#varornamednode)**. The predicate of the triple pattern. | -| `subject` | _(Required.) _ **[VarOrNode](#varornode)**. The subject of the triple pattern. | - -### TripleDeleteTemplate - -Represents a triple template to be deleted. - -| property | description | -| ----------- | --------------------------------------------------------------------------------------------------------- | -| `object` | _(Required.) _ **[VarOrNamedNodeOrLiteral](#varornamednodeorliteral)**. The object of the triple pattern. | -| `predicate` | _(Required.) _ **[VarOrNamedNode](#varornamednode)**. The predicate of the triple pattern. | -| `subject` | _(Required.) _ **[VarOrNamedNode](#varornamednode)**. 
The subject of the triple pattern. | - -### TriplePattern - -Represents a triple pattern in a [SimpleWhereCondition]. - -| property | description | -| ----------- | ----------------------------------------------------------------------------------------------- | -| `object` | _(Required.) _ **[VarOrNodeOrLiteral](#varornodeorliteral)**. The object of the triple pattern. | -| `predicate` | _(Required.) _ **[VarOrNamedNode](#varornamednode)**. The predicate of the triple pattern. | -| `subject` | _(Required.) _ **[VarOrNode](#varornode)**. The subject of the triple pattern. | - -### Turtle - -Output in [Turtle](https://www.w3.org/TR/turtle/) format. - -| literal | -| ---------- | -| `"turtle"` | - -### TypedValue - -A value with a datatype. - -| property | description | -| ---------------------- | ----------------------------------------------------------------------------------------------------------- | -| `typed_value` | _(Required.) _ **object**. | -| `typed_value.datatype` | _(Required.) _ **[IRI](#iri)**. The [datatype IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-datatype-iri). | -| `typed_value.value` | _(Required.) _ **string**. The [lexical form](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form). | - -### URI - -Represents an IRI. - -| property | description | -| -------- | ----------------------------------------------------- | -| `type` | _(Required.) _ **string**. | -| `value` | _(Required.) _ **[IRI](#iri)**. The value of the IRI. | - -### Uint128 - -A string containing a 128-bit integer in decimal representation. - -| type | -| ----------- | -| **string**. | - -### Value - -| variant | description | -| ----------------------- | ---------------------------------------------------------------------------------- | -| [URI](#uri) | **object**. Represents an IRI. | -| [Literal](#literal) | **object**. Represents a literal S with optional language tag L or datatype IRI D. | -| [BlankNode](#blanknode) | **object**. Represents a blank node. 
| - -### VarOrNamedNode - -Represents either a variable or a named node (IRI). - -| variant | description | -| ----------------------- | ------------------------------------------------------------------------ | -| [Variable](#variable) | **object**. A variable. | -| [NamedNode](#namednode) | **object**. An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). | - -### VarOrNamedNodeOrLiteral - -Represents either a variable, a named node or a literal. - -| variant | description | -| ----------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -| [Variable](#variable) | **object**. A variable. | -| [NamedNode](#namednode) | **object**. An RDF [IRI](https://www.w3.org/TR/rdf11-concepts/#dfn-iri). | -| [Literal](#literal) | **object**. An RDF [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal), i.e. a simple literal, a language-tagged string or a typed value. | - -### VarOrNode - -Represents either a variable or a node. - -| variant | description | -| --------------------- | ------------------------------------------------ | -| [Variable](#variable) | **object**. A variable. | -| [Node](#node) | **object**. A node, i.e. an IRI or a blank node. | - -### VarOrNodeOrLiteral - -Represents either a variable, a node or a literal. - -| variant | description | -| --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -| [Variable](#variable) | **object**. A variable. | -| [Node](#node) | **object**. A node, i.e. an IRI or a blank node. | -| [Literal](#literal) | **object**. An RDF [literal](https://www.w3.org/TR/rdf11-concepts/#dfn-literal), i.e. a simple literal, a language-tagged string or a typed value. | - -### Variable - -A variable. 
- -| property | description | -| ---------- | -------------------------- | -| `variable` | _(Required.) _ **string**. | - -### WhereClause - -Represents a WHERE clause, i.e. a set of conditions to filter the results. - -| variant | description | -| --------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [Bgp](#bgp) | **object**. Represents a basic graph pattern expressed as a set of triple patterns. | -| [LateralJoin](#lateraljoin) | **object**. Evaluates right for all result row of left | -| [Filter](#filter) | **object**. Filters the inner clause matching the expression. The solutions coming from the inner clause that do not match the expression are discarded. The variables provided in the inner clause are available in the filter expression. | - -### undefined - -A named node constant. - -| property | description | -| ------------ | -------------------------------------------------------- | -| `named_node` | _(Required.) _ **[Prefixed](#prefixed)\|[Full](#full)**. | - ---- - -_Rendered by [Fadroma](https://fadroma.tech) ([@fadroma/schema 1.1.0](https://www.npmjs.com/package/@fadroma/schema)) from `axone-cognitarium.json` (`7ba3e778bc6d4a8b`)_ diff --git a/docs/axone-dataverse.md b/docs/axone-dataverse.md deleted file mode 100644 index fc48be90..00000000 --- a/docs/axone-dataverse.md +++ /dev/null @@ -1,242 +0,0 @@ -# Dataverse - -## Overview - -The `dataverse` smart contract is responsible for overseeing and managing the Dataverse. - -## Dataverse - -The Dataverse is an ever-expanding universe that encompasses a wide range of digital resources. 
These include datasets, data processing algorithms, ML algorithms, storage resources, computational resources, identity management solutions, orchestration engines, oracles, and many other resources recorded on the blockchain. - -When the smart contract is instantiated, it creates a Dataverse instance. This instance is separated and isolated from any pre-existing ones, and as many dataverse instances as required can be created. - -## Zones - -Zones within the Dataverse represent distinct areas or domains where specific governance rules and policies are applied. These Zones are conceptual frameworks created to manage and organize resources under a unified set of regulations and permissions. - -Each Zone is defined by its unique identity and set of governing rules, which dictate how resources within it can be accessed, used, and shared. This approach allows for granular control over different segments of the Dataverse, catering to various requirements and use cases. By managing these Zones, the dataverse smart contract ensures that resources are utilized in compliance with the defined policies and consents, thereby maintaining order and integrity within the Dataverse. - -## Resources - -In the context of the Dataverse, Resources refer to a broad category of digital entities, which include Services and Digital Resources. - -- **Digital Resources**: This category extends to various digital entities such as datasets, algorithms, machine learning models, and other digital assets. Like Services, Digital Resources are identified by a URI in conjunction with the Service responsible for their provision. - -- **Services**: These are network-accessible functionalities like REST APIs, gRPC services, and other similar offerings. Each Service in the Dataverse is uniquely identified by its Uniform Resource Identifier (URI) and is associated with a specific Registrar responsible for its registration and management. 
- -## Decentralized Identifiers (DID) - -Decentralized Identifiers (DID) are a foundational element in the Dataverse, serving as unique, persistent, and globally resolvable identifiers that are fully under the control of the DID subject, which could be an individual, organization, or any kind of resource (dataset, -algorithm, nft, ML algorithm). - -DIDs play a crucial role in the Dataverse by facilitating a trustable and interoperable identity mechanism. They enable the establishment of a verifiable and self-sovereign identity for resources, services, and entities within the ecosystem. - -## Claims - -Claims in the Dataverse context are assertions or statements made about a Resource identified by a DID. - -Claims play a pivotal role in the governance framework of the Dataverse. By leveraging knowledge derived from verifiable credentials, the governances established by Zones can evaluate the fulfillment of specific rules and compliance. This evaluation is critical in ensuring that the resources within the Dataverse adhere to the established norms, policies, and requirements. - -Claims are submitted in the form of [Verifiable Presentations (VPs)](https://www.w3.org/TR/vc-data-model/#presentations), which are aggregations of one or more [Verifiable Credentials (VCs)](https://www.w3.org/TR/vc-data-model/#what-is-a-verifiable-credential). - -## Dependencies - -Given its role and status, this smart contract serves as the primary access point for the AXONE protocol to manage all on-chain stored resources. To fulfill its tasks, the smart contract relies on other smart contracts within the AXONE ecosystem. Notably, it uses the `Cognitarium` smart contract for persisting the Dataverse representation in an ontological form and the `Law Stone` smart contract to establish governance rules. - -## InstantiateMsg - -`InstantiateMsg` is used to initialize a new instance of the dataverse. 
- -| parameter | description | -| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `name` | _(Required.) _ **string**. A unique name to identify the dataverse instance. | -| `triplestore_config` | _(Required.) _ **[TripleStoreConfig](#triplestoreconfig)**. The configuration used to instantiate the triple store. | -| `triplestore_config.code_id` | **[Uint64](#uint64)**. The code id that will be used to instantiate the triple store contract in which to store dataverse semantic data. It must implement the cognitarium interface. | -| `triplestore_config.limits` | **[TripleStoreLimitsInput](#triplestorelimitsinput)**. Limitations regarding triple store usage. | - -## ExecuteMsg - -`ExecuteMsg` defines the set of possible actions that can be performed on the dataverse. - -This enum provides variants for registering services, datasets, and other operations related to the dataverse. - -### ExecuteMsg::SubmitClaims - -Submits new claims about a resource to the dataverse. - -The SubmitClaims message is a pivotal component in the dataverse, enabling entities to contribute new claims about various resources. A claim represents a statement made by an entity, referred to as the issuer, which could be a person, organization, or service. These claims pertain to a diverse range of resources, including digital resources, services, zones, or individuals, and are asserted as factual by the issuer. - -#### Format - -Claims are injected into the dataverse through Verifiable Credentials (VCs). - -Primarily, the claims leverage the AXONE ontology, which facilitates articulating assertions about widely acknowledged resources in the dataverse, including digital services, digital resources, zones, governance, and more. 
- -Additionally, other schemas may also be employed to supplement and enhance the validated knowledge contributed to these resources. - -#### Preconditions - -To maintain integrity and coherence in the dataverse, several preconditions are set for the submission of claims: - -1. **Format Requirement**: Claims must be encapsulated within Verifiable Credentials (VCs). - -2. **Unique Identifier Mandate**: Each Verifiable Credential within the dataverse must possess a unique identifier. - -3. **Issuer Verification**: Claims are accepted if they either: - Bear a verifiable issuer's signature to ensure authenticity. - Originate from the transaction sender, in which case the transaction signature serves as proof of authenticity. - -4. **Content**: The actual implementation supports the submission of a single Verifiable Credential, containing a single claim. - -#### Supported cryptographic proofs - -- `Ed25519Signature2018` - -- `Ed25519Signature2020` - -- `EcdsaSecp256k1Signature2019` - -- `DataIntegrity` with the following cryptosuites: `eddsa-2022`, `eddsa-rdfc-2022`. - -| parameter | description | -| ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `submit_claims` | _(Required.) _ **object**. | -| `submit_claims.claims` | _(Required.) _ **[Binary](#binary)**. The Verifiable Credential containing the claims. The claims must be serialized in the format specified by the `format` field. | -| `submit_claims.format` | **[RdfDatasetFormat](#rdfdatasetformat)\|null**. RDF dataset serialization format for the claims. If not provided, the default format is [N-Quads](https://www.w3.org/TR/n-quads/) format. | - -### ExecuteMsg::RevokeClaims - -Revoke or withdraw a previously submitted claims. - -#### Preconditions: - -1. **Identifier Existence**: The identifier of the claims must exist in the dataverse. 
- -| parameter | description | -| -------------------------- | ----------------------------------------------------------------------------- | -| `revoke_claims` | _(Required.) _ **object**. | -| `revoke_claims.identifier` | _(Required.) _ **string**. The unique identifier of the claims to be revoked. | - -## QueryMsg - -`QueryMsg` defines the set of possible queries that can be made to retrieve information about the dataverse. - -This enum provides variants for querying the dataverse's details and other related information. - -### QueryMsg::Dataverse - -Retrieves information about the current dataverse instance. - -| parameter | description | -| ----------- | -------------------------- | -| `dataverse` | _(Required.) _ **object**. | - -## Responses - -### dataverse - -DataverseResponse is the response of the Dataverse query. - -| property | description | -| --------------------- | ------------------------------------------------------------------- | -| `name` | _(Required.) _ **string**. The name of the dataverse. | -| `triplestore_address` | _(Required.) _ **[Addr](#addr)**. The cognitarium contract address. | - -## Definitions - -### Addr - -A human readable address. - -In Cosmos, this is typically bech32 encoded. But for multi-chain smart contracts no assumptions should be made other than being UTF-8 encoded and of reasonable length. - -This type represents a validated address. It can be created in the following ways 1. Use `Addr::unchecked(input)` 2. Use `let checked: Addr = deps.api.addr_validate(input)?` 3. Use `let checked: Addr = deps.api.addr_humanize(canonical_addr)?` 4. Deserialize from JSON. This must only be done from JSON that was validated before such as a contract's state. `Addr` must not be used in messages sent by the user because this would result in unvalidated instances. - -This type is immutable. If you really need to mutate it (Really? 
Are you sure?), create a mutable copy using `let mut mutable = Addr::to_string()` and operate on that `String` instance. - -| type | -| ----------- | -| **string**. | - -### Binary - -A string containing Base64-encoded data. - -| type | -| ----------- | -| **string**. | - -### NQuads - -N-Quads Format - -N-Quads is an extension of N-Triples to support RDF datasets by adding an optional fourth element to represent the graph name. See the [official N-Quads specification](https://www.w3.org/TR/n-quads/). - -| literal | -| ----------- | -| `"n_quads"` | - -### RdfDatasetFormat - -Represents the various serialization formats for an RDF dataset, i.e. a collection of RDF graphs ([RDF Dataset](https://www.w3.org/TR/rdf11-concepts/#section-dataset)). - -| variant | description | -| ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [NQuads](#nquads) | **string**: `n_quads`. N-Quads Format

N-Quads is an extension of N-Triples to support RDF datasets by adding an optional fourth element to represent the graph name. See the [official N-Quads specification](https://www.w3.org/TR/n-quads/). | - -### TripleStoreConfig - -`TripleStoreConfig` represents the configuration related to the management of the triple store. - -| property | description | -| ------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `code_id` | _(Required.) _ **[Uint64](#uint64)**. The code id that will be used to instantiate the triple store contract in which to store dataverse semantic data. It must implement the cognitarium interface. | -| `limits` | _(Required.) _ **[TripleStoreLimitsInput](#triplestorelimitsinput)**. Limitations regarding triple store usage. | -| `limits.max_byte_size` | **[Uint128](#uint128)\|null**. The maximum number of bytes the store can contain. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `limits.max_insert_data_byte_size` | **[Uint128](#uint128)\|null**. The maximum number of bytes an insert data query can contain. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `limits.max_insert_data_triple_count` | **[Uint128](#uint128)\|null**. The maximum number of triples an insert data query can contain (after parsing). Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `limits.max_query_limit` | **integer\|null**. 
The maximum limit of a query, i.e. the maximum number of triples returned by a select query. Default to 30 if not set. | -| `limits.max_query_variable_count` | **integer\|null**. The maximum number of variables a query can select. Default to 30 if not set. | -| `limits.max_triple_byte_size` | **[Uint128](#uint128)\|null**. The maximum number of bytes the store can contain for a single triple. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. The limit is used to prevent storing very large triples, especially literals. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `limits.max_triple_count` | **[Uint128](#uint128)\|null**. The maximum number of triples the store can contain. Default to [Uint128::MAX] if not set, which can be considered as no limit. | - -### TripleStoreLimitsInput - -Contains requested limitations regarding store usages. - -| property | description | -| ------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `max_byte_size` | **[Uint128](#uint128)\|null**. The maximum number of bytes the store can contain. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_insert_data_byte_size` | **[Uint128](#uint128)\|null**. The maximum number of bytes an insert data query can contain. Default to [Uint128::MAX] if not set, which can be considered as no limit. 
| -| `max_insert_data_triple_count` | **[Uint128](#uint128)\|null**. The maximum number of triples an insert data query can contain (after parsing). Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_query_limit` | **integer\|null**. The maximum limit of a query, i.e. the maximum number of triples returned by a select query. Default to 30 if not set. | -| `max_query_variable_count` | **integer\|null**. The maximum number of variables a query can select. Default to 30 if not set. | -| `max_triple_byte_size` | **[Uint128](#uint128)\|null**. The maximum number of bytes the store can contain for a single triple. The size of a triple is counted as the sum of the size of its subject, predicate and object, including the size of data types and language tags if any. The limit is used to prevent storing very large triples, especially literals. Default to [Uint128::MAX] if not set, which can be considered as no limit. | -| `max_triple_count` | **[Uint128](#uint128)\|null**. The maximum number of triples the store can contain. Default to [Uint128::MAX] if not set, which can be considered as no limit. | - -### Uint128 - -A string containing a 128-bit integer in decimal representation. - -| type | -| ----------- | -| **string**. | - -### Uint64 - -A thin wrapper around u64 that is using strings for JSON encoding/decoding, such that the full u64 range can be used for clients that convert JSON numbers to floats, like JavaScript and jq. 
- -# Examples - -Use `from` to create instances of this and `u64` to get the value out: - -````# use cosmwasm_std::Uint64; let a = Uint64::from(42u64); assert_eq!(a.u64(), 42); - -let b = Uint64::from(70u32); assert_eq!(b.u64(), 70); ``` - -|type| -|----| -|**string**.| - ---- - -*Rendered by [Fadroma](https://fadroma.tech) ([@fadroma/schema 1.1.0](https://www.npmjs.com/package/@fadroma/schema)) from `axone-dataverse.json` (`854e4cf3e67b73f0`)* -```` diff --git a/docs/axone-dummy.md b/docs/axone-dummy.md new file mode 100644 index 00000000..4662bc31 --- /dev/null +++ b/docs/axone-dummy.md @@ -0,0 +1,44 @@ +# Axone Dummy Contract + +This is a simple dummy contract kept as a placeholder during the refactoring of the AXONE protocol contracts. It does not implement any real functionality. + +## InstantiateMsg + +Instantiate message + +| type | +| ----------- | +| **object**. | + +## ExecuteMsg + +Execute messages + +### ExecuteMsg::Foo + +| literal | +| ------- | +| `"foo"` | + +## QueryMsg + +Query messages + +### QueryMsg::Bar + +| parameter | description | +| --------- | -------------------------- | +| `bar` | _(Required.) _ **object**. | +| `bar.msg` | _(Required.) _ **string**. | + +## Responses + +### bar + +| property | description | +| -------- | -------------------------------------------- | +| `msg` | _(Required.) _ **string**. The message value | + +--- + +_Rendered by [Fadroma](https://fadroma.tech) ([@fadroma/schema 1.1.0](https://www.npmjs.com/package/@fadroma/schema)) from `axone-dummy.json` (`7cfdeffbb29ab213`)_ diff --git a/docs/axone-law-stone.md b/docs/axone-law-stone.md deleted file mode 100644 index 12afbfdd..00000000 --- a/docs/axone-law-stone.md +++ /dev/null @@ -1,139 +0,0 @@ -# Law Stone - -## Overview - -The `axone-law-stone` smart contract aims to provide GaaS (i.e. 
Governance as a Service) in any [Cosmos blockchains](https://cosmos.network/) using the [CosmWasm](https://cosmwasm.com/) framework and the [Logic](https://docs.axone.xyz/modules/next/logic) AXONE module. - -This contract is built around a Prolog program describing the law by rules and facts. The law stone is immutable, this means it can only be questioned, there are no update mechanisms. - -The `axone-law-stone` responsibility is to guarantee the availability of its rules in order to question them, but not to ensure the rules application. - -To ensure reliability over time, the associated Prolog program is stored and pinned in a `axone-objectarium` contract. Moreover, all the eventual loaded files must be stored in a `axone-objectarium` contract as well, allowing the contract to pin them. - -To be able to free the underlying resources (i.e. objects in `axone-objectarium`) if not used anymore, the contract admin can break the stone. - -➡️ Checkout the [examples](https://github.com/axone-protocol/contracts/tree/main/contracts/axone-law-stone/examples/) for usage information. - -## InstantiateMsg - -Instantiate message - -| parameter | description | -| ----------------- | ------------------------------------------------------------------------------------------------------ | -| `program` | _(Required.) _ **[Binary](#binary)**. The Prolog program carrying law rules and facts. | -| `storage_address` | _(Required.) _ **string**. The `axone-objectarium` contract address on which to store the law program. | - -## ExecuteMsg - -Execute messages - -### ExecuteMsg::BreakStone - -Break the stone making this contract unusable, by clearing all the related resources: - Unpin all the pinned objects on `axone-objectarium` contracts, if any. - Forget the main program (i.e. or at least unpin it). - -Only the creator address (the address that instantiated the contract) is authorized to invoke this message. If already broken, this is a no-op. 
- -| parameter | description | -| ------------- | -------------------------- | -| `break_stone` | _(Required.) _ **object**. | - -## QueryMsg - -Query messages - -### QueryMsg::Ask - -Submits a Prolog query string to the `Logic` module, evaluating it against the law program associated with this contract. - -If the law stone is broken the query returns a response with the error `error(system_error(broken_law_stone),root)` set in the `answer` field. - -| parameter | description | -| ----------- | -------------------------- | -| `ask` | _(Required.) _ **object**. | -| `ask.query` | _(Required.) _ **string**. | - -### QueryMsg::Program - -Retrieves the location metadata of the law program bound to this contract. - -This includes the contract address of the `objectarium` and the program object ID, where the law program's code can be accessed. - -| parameter | description | -| --------- | -------------------------- | -| `program` | _(Required.) _ **object**. | - -### QueryMsg::ProgramCode - -Fetches the raw code of the law program tied to this contract. - -If the law stone is broken, the query may fail if the program is no longer available in the `Objectarium`. - -| parameter | description | -| -------------- | -------------------------- | -| `program_code` | _(Required.) _ **object**. | - -## Responses - -### ask - -| property | description | -| ------------- | ---------------------------- | -| `answer` | **[Answer](#answer)\|null**. | -| `gas_used` | _(Required.) _ **integer**. | -| `height` | _(Required.) _ **integer**. | -| `user_output` | **string\|null**. | - -### program - -ProgramResponse carry elements to locate the program in a `axone-objectarium` contract. - -| property | description | -| ----------------- | ------------------------------------------------------------------------------------------------------- | -| `object_id` | _(Required.) _ **string**. The program object id in the `axone-objectarium` contract. | -| `storage_address` | _(Required.) 
_ **string**. The `axone-objectarium` contract address on which the law program is stored. | - -### program_code - -Binary is a wrapper around Vec<u8> to add base64 de/serialization with serde. It also adds some helper methods to help encode inline. - -This is only needed as serde-json-\{core,wasm\} has a horrible encoding for Vec<u8>. See also <https://github.com/CosmWasm/cosmwasm/blob/main/docs/MESSAGE_TYPES.md>. - -| type | -| ----------- | -| **string**. | - -## Definitions - -### Answer - -| property | description | -| ----------- | -------------------------------------------------- | -| `has_more` | _(Required.) _ **boolean**. | -| `results` | _(Required.) _ **Array<[Result](#result)>**. | -| `variables` | _(Required.) _ **Array<string>**. | - -### Binary - -A string containing Base64-encoded data. - -| type | -| ----------- | -| **string**. | - -### Result - -| property | description | -| --------------- | -------------------------------------------------------------- | -| `error` | **string\|null**. | -| `substitutions` | _(Required.) _ **Array<[Substitution](#substitution)>**. | - -### Substitution - -| property | description | -| ------------ | -------------------------- | -| `expression` | _(Required.) _ **string**. | -| `variable` | _(Required.) _ **string**. | - ---- - -_Rendered by [Fadroma](https://fadroma.tech) ([@fadroma/schema 1.1.0](https://www.npmjs.com/package/@fadroma/schema)) from `axone-law-stone.json` (`7546fa394dc640cf`)_ diff --git a/docs/axone-objectarium.md b/docs/axone-objectarium.md deleted file mode 100644 index a8601930..00000000 --- a/docs/axone-objectarium.md +++ /dev/null @@ -1,636 +0,0 @@ -# Objectarium - -A [CosmWasm](https://cosmwasm.com/) Smart Contract which enables the storage of arbitrary unstructured [Objects](https://en.wikipedia.org/wiki/Object_storage) in any [Cosmos](https://cosmos.network/) blockchains. 
- -## Purpose - -The smart contract serves as a robust storage solution, allowing for the storage of arbitrary `objects` on any blockchain within the [Cosmos blockchains](https://cosmos.network/) network, utilizing the [CosmWasm](https://cosmwasm.com/) framework. The key features of the contract include: - -**Versatile Data Storage:** -The contract is designed to accommodate any type of data, be it text, images, or complex data structures. This flexibility makes it an ideal choice for a wide range of decentralized applications (dApps) that require diverse storage needs. - -**On-chain Data:** -By design, the contract stores data on the blockchain, ensuring that it is immutable and publicly accessible. This is particularly useful for applications that require a high level of transparency, and also for any other smart contract that needs to store data on the blockchain. - -**Pinning and Unpinning:** -One unique feature is the ability to 'pin' and 'unpin' objects associated with a specific sender address. Pinning ensures that the object remains stored and accessible, while unpinning releases it from being permanently stored, offering a level of control over data persistence. - -**Object Removal:** -The contract also includes a 'forget' function, allowing for the removal of objects that are no longer pinned. This is particularly useful for managing storage costs and ensuring that only relevant data remains on the blockchain. - -**Cost Management:** -Features like pinning, unpinning, and discarding objects offer a strategic way to control storage costs. Additionally, setting limits on contract size — for instance in terms of object count and their individual sizes — serves as a practical tool to regulate storage costs. - -## Rationale - -In a sense, we can consider blockchains built on the [Cosmos L0](https://docs.cosmos.network/main) layer as decentralized databases, and their nature can be shaped and modeled through the smart contracts or modules. 
Given this, it provides a great opportunity to address the wide range of data management needs. One such important area is the management of unstructured, immutable data, which is written once but accessed frequently — commonly known as object storage. This is the primary focus of `axone-objectarium`: a specialized smart contract designed to offer a versatile and efficient approach to handling _on-chain_, _unstructured_, _immutable_ data in a _decentralized_ manner. - -## Terminology - -### Object - -In the context of the `axone-objectarium` smart contract, an `object` refers to a piece of data stored on the blockchain. It can represent various types of information, such as documents, binary files, or any other digital content. Objects are immutable once stored and are identified by their cryptographic hash, which can be generated using algorithms like MD5 or SHA256. This ensures the integrity and security of the stored data, as any modification to the object would result in a different hash value. - -### Bucket - -The smart contract is organized around buckets. A bucket represents a logical container within the `axone-objectarium` smart contract instance that groups related Objects together. It acts as a storage unit for Objects and provides a context for managing and organizing them. Each bucket has a unique name and is associated with a set of configurations and limits that define its behaviour and characteristics. - -### Pin - -Pin refers to a mechanism that allows users to mark or "pin" specific objects within a bucket. Pinning an object serves as a way to ensure that the object remains in storage and cannot be removed (this is called "forgotten"). It provides protection and guarantees that the pinned object will persist in the protocol. When an object is pinned, it is associated with the identity (or sender) that performed the pinning action. 
- -## Usage - -The unstructured nature of the data stored in the chain opens up a plethora of possibilities for decentralized applications that require this type of versatile storage. - -### In the AXONE protocol - -The primary function of this smart contract within the AXONE protocol is to enable the persistence of governance rules, which are encoded in Prolog. These programs are stored in an immutable format within the protocol and can be referenced by their unique identifiers in situations where there is a need to refer to these rules. - -### In the wild world - -A plethora of possibilities opens up for decentralized applications (dApps) that require this kind of versatile storage. However, it's important to consider the following constraints: the data is immutable, the cost of recording the data is proportional to its size, and the data is publicly accessible. - -## Play - -### Instantiation - -The `axone-objectarium` can be instantiated as follows, refer to the schema for more information on configuration, limits and pagination configuration: - -```bash -axoned tx wasm instantiate $CODE_ID \ - --label "my-storage" \ - --from $ADDR \ - --admin $ADMIN_ADDR \ - --gas 1000000 \ - '{"bucket":"my-bucket"}' -``` - -### Execution - -We can store an object by providing its data in base64 encoded, we can pin the stored object to prevent it from being removed: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"store_object\":{\"data\": \"$(cat my-data | base64)\",\"pin\":true}}" -``` - -The object id is stable as it is a hash, we can't store an object twice. 
- -With the following commands we can pin and unpin existing objects: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"pin_object\":{\"id\": \"$OBJECT_ID\"}}" - -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"unpin_object\":{\"id\": \"$OBJECT_ID\"}}" -``` - -And if an object is not pinned, or pinned by the sender of transaction, we can remove it: - -```bash -axoned tx wasm execute $CONTRACT_ADDR \ - --from $ADDR \ - --gas 1000000 \ - "{\"forget_object\":{\"id\": \"$OBJECT_ID\"}}" -``` - -### Querying - -Query an object by its id: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"object\": {\"id\": \"$OBJECT_ID\"}}" -``` - -Or its data: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"object_data\": {\"id\": \"$OBJECT_ID\"}}" -``` - -We can also list the objects, eventually filtering on the object owner: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"objects\": {\"address\": \"axone1p8u47en82gmzfm259y6z93r9qe63l25d858vqu\"}}" -``` - -And navigate in a cursor based pagination: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"objects\": {\"first\": 5, \"after\": \"23Y5t5DBe7DkPwfJo3Sd26Y8Z9epmtpA1FTpdG7DiG6MD8vPRTzzbQ9TccmyoBcePkPK6atUiqcAzJVo3TfYNBGY\"}}" -``` - -We can also query object pins with the same cursor based pagination: - -```bash -axoned query wasm contract-state smart $CONTRACT_ADDR \ - "{\"object_pins\": {\"id\": \"$OBJECT_ID\", \"first\": 5, \"after\": \"23Y5t5DBe7DkPwfJo3Sd26Y8Z9epmtpA1FTpdG7DiG6MD8vPRTzzbQ9TccmyoBcePkPK6atUiqcAzJVo3TfYNBGY\"}}" -``` - -## InstantiateMsg - -Instantiate messages - -| parameter | description | -| ------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `bucket` | _(Required.) _ **string**. The name of the bucket. The name could not be empty or contains whitespaces. If name contains whitespace, they will be removed. | -| `config` | **[BucketConfig](#bucketconfig)**. The configuration of the bucket. | -| `config.compression_algorithm` | **[CompressionAlgorithm](#compressionalgorithm)**. The compression algorithm used for all objects in the bucket. All objects stored in the bucket will use this compression algorithm. The default algorithm is Passthrough if not set.
**Default:** `"passthrough"` | -| `config.hash_algorithm` | **[HashAlgorithm](#hashalgorithm)**. The algorithm used to hash the content of the objects to generate the id of the objects. The algorithm is optional and if not set, the default algorithm is used.

The default algorithm is Sha256 if not set.
**Default:** `"sha256"` | -| `limits` | **[BucketLimits](#bucketlimits)**. The limits of the bucket. | -| `limits.max_object_pins` | **[Uint128](#uint128)\|null**. The maximum number of pins in the bucket for an object. | -| `limits.max_object_size` | **[Uint128](#uint128)\|null**. The maximum size of the objects in the bucket. | -| `limits.max_objects` | **[Uint128](#uint128)\|null**. The maximum number of objects in the bucket. | -| `limits.max_total_size` | **[Uint128](#uint128)\|null**. The maximum total size of the objects in the bucket. | -| `owner` | **string\|null**. The contract owner. If not set, the contract starts without an owner (ownerless). | -| `pagination` | **[PaginationConfig](#paginationconfig)**. The configuration for paginated query. | -| `pagination.default_page_size` | **integer**. The default number of elements in a page.

Shall be less or equal than `max_page_size`. Default to '10' if not set.
**Default:** `10` | -| `pagination.max_page_size` | **integer**. The maximum elements a page can contain.

Shall be less than `u32::MAX - 1`. Default to '30' if not set.
**Default:** `30` | - -## ExecuteMsg - -Execute messages - -### ExecuteMsg::StoreObject - -StoreObject store an object to the bucket. The object is referenced by the hash of its content and this value is returned. If the object is already stored, it is a no-op. It may be pinned though. - -The "pin" parameter specifies whether the object should be pinned for the sender. Pinning an object ensures it is protected from being removed from storage, making it persistent and guaranteeing its indefinite accessibility. It’s important to note that pinning is optional; objects can be stored without pinning. However, be aware that non-pinned objects can be removed from the storage by anyone at any time, making them no longer accessible. - -The object will be compressed using the bucket's compression algorithm as specified in the bucket configuration. - -| parameter | description | -| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `store_object` | _(Required.) _ **object**. | -| `store_object.data` | _(Required.) _ **[Binary](#binary)**. The content of the object to store. | -| `store_object.pin` | _(Required.) _ **boolean**. Specifies whether the object should be pinned for the sender. Pinning ensures the object remains persistent and cannot be removed from storage by anyone. | - -### ExecuteMsg::ForgetObject - -ForgetObject first unpins the object from the bucket for the sender, then removes it from storage if it is no longer pinned by anyone. If the object is still pinned by other senders, it is not removed from storage and an error is returned. If the object is not pinned for the sender, this operation is a no-op. - -| parameter | description | -| ------------------ | -------------------------- | -| `forget_object` | _(Required.) _ **object**. | -| `forget_object.id` | _(Required.) _ **string**. 
| - -### ExecuteMsg::PinObject - -PinObject pins the object in the bucket for the sender. If the object is already pinned for the sender, this operation is a no-op. While an object is pinned, it cannot be removed from storage. - -| parameter | description | -| --------------- | -------------------------- | -| `pin_object` | _(Required.) _ **object**. | -| `pin_object.id` | _(Required.) _ **string**. | - -### ExecuteMsg::UnpinObject - -UnpinObject unpins the object in the bucket for the sender. If the object is not pinned for the sender, this operation is a no-op. The object can be removed from storage if it is no longer pinned by anyone. - -| parameter | description | -| ----------------- | -------------------------- | -| `unpin_object` | _(Required.) _ **object**. | -| `unpin_object.id` | _(Required.) _ **string**. | - -### ExecuteMsg::undefined - -Update the contract's ownership. The `action` to be provided can be either to propose transferring ownership to an account, accept a pending ownership transfer, or renounce the ownership permanently. - -| parameter | description | -| ------------------ | ------------------------------------------ | -| `update_ownership` | _(Required.) _ **object\|string\|string**. | - -## QueryMsg - -Query messages - -### QueryMsg::Bucket - -Bucket returns the bucket information. - -| parameter | description | -| --------- | -------------------------- | -| `bucket` | _(Required.) _ **object**. | - -### QueryMsg::Object - -Object returns the object information with the given id. - -| parameter | description | -| ----------- | ------------------------------------------------------- | -| `object` | _(Required.) _ **object**. | -| `object.id` | _(Required.) _ **string**. The id of the object to get. | - -### QueryMsg::Objects - -Objects returns the list of objects in the bucket with support for pagination. 
- -| parameter | description | -| --------------- | ----------------------------------------------------------------------- | -| `objects` | _(Required.) _ **object**. | -| `objects.after` | **string\|null**. The point in the sequence to start returning objects. | -| `objects.first` | **integer\|null**. The number of objects to return. | - -### QueryMsg::ObjectData - -ObjectData returns the content of the object with the given id. - -| parameter | description | -| ---------------- | ------------------------------------------------------- | -| `object_data` | _(Required.) _ **object**. | -| `object_data.id` | _(Required.) _ **string**. The id of the object to get. | - -### QueryMsg::ObjectsPinnedBy - -ObjectsPinnedBy returns the list of objects pinned by the given address with support for pagination. - -| parameter | description | -| --------------------------- | ------------------------------------------------------------------------------ | -| `objects_pinned_by` | _(Required.) _ **object**. | -| `objects_pinned_by.address` | _(Required.) _ **string**. The address whose pinned objects should be listed. | -| `objects_pinned_by.after` | **string\|null**. The point in the sequence to start returning pinned objects. | -| `objects_pinned_by.first` | **integer\|null**. The number of objects to return. | - -### QueryMsg::PinsForObject - -PinsForObject returns the list of addresses that pinned the object with the given id with support for pagination. - -| parameter | description | -| --------------------------- | ---------------------------------------------------------------------------------------- | -| `pins_for_object` | _(Required.) _ **object**. | -| `pins_for_object.after` | **string\|null**. The point in the sequence to start returning pins. | -| `pins_for_object.first` | **integer\|null**. The number of pins to return. | -| `pins_for_object.object_id` | _(Required.) _ **string**. The id of the object for which to list all pinning addresses. 
| - -### QueryMsg::undefined - -Query the contract's ownership information - -| parameter | description | -| ----------- | -------------------------- | -| `ownership` | _(Required.) _ **object**. | - -## Responses - -### bucket - -BucketResponse is the response of the Bucket query. - -| property | description | -| ------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `config` | _(Required.) _ **[BucketConfig](#bucketconfig)**. The configuration of the bucket. | -| `config.compression_algorithm` | **[CompressionAlgorithm](#compressionalgorithm)**. The compression algorithm used for all objects in the bucket. All objects stored in the bucket will use this compression algorithm. The default algorithm is Passthrough if not set.
**Default:** `"passthrough"` | -| `config.hash_algorithm` | **[HashAlgorithm](#hashalgorithm)**. The algorithm used to hash the content of the objects to generate the id of the objects. The algorithm is optional and if not set, the default algorithm is used.

The default algorithm is Sha256 if not set.
**Default:** `"sha256"` | -| `limits` | _(Required.) _ **[BucketLimits](#bucketlimits)**. The limits of the bucket. | -| `limits.max_object_pins` | **[Uint128](#uint128)\|null**. The maximum number of pins in the bucket for an object. | -| `limits.max_object_size` | **[Uint128](#uint128)\|null**. The maximum size of the objects in the bucket. | -| `limits.max_objects` | **[Uint128](#uint128)\|null**. The maximum number of objects in the bucket. | -| `limits.max_total_size` | **[Uint128](#uint128)\|null**. The maximum total size of the objects in the bucket. | -| `name` | _(Required.) _ **string**. The name of the bucket. | -| `pagination` | _(Required.) _ **[PaginationConfig](#paginationconfig)**. The configuration for paginated query. | -| `pagination.default_page_size` | **integer**. The default number of elements in a page.

Shall be less or equal than `max_page_size`. Default to '10' if not set.
**Default:** `10` | -| `pagination.max_page_size` | **integer**. The maximum elements a page can contain.

Shall be less than `u32::MAX - 1`. Default to '30' if not set.
**Default:** `30` | -| `stat` | _(Required.) _ **[BucketStat](#bucketstat)**. The statistics of the bucket. | -| `stat.compressed_size` | **[Uint128](#uint128)**. The total size of the objects contained in the bucket after compression. | -| `stat.object_count` | **[Uint128](#uint128)**. The number of objects in the bucket. | -| `stat.size` | **[Uint128](#uint128)**. The total size of the objects contained in the bucket. | - -### object - -ObjectResponse is the response of the Object query. - -| property | description | -| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | -| `compressed_size` | _(Required.) _ **[Uint128](#uint128)**. The size of the object when compressed. If the object is not compressed, the value is the same as `size`. | -| `id` | _(Required.) _ **string**. The id of the object. | -| `is_pinned` | _(Required.) _ **boolean**. Tells if the object is pinned by at least one address. | -| `size` | _(Required.) _ **[Uint128](#uint128)**. The size of the object. | - -### object_data - -Binary is a wrapper around Vec<u8> to add base64 de/serialization with serde. It also adds some helper methods to help encode inline. - -This is only needed as serde-json-\{core,wasm\} has a horrible encoding for Vec<u8>. See also <https://github.com/CosmWasm/cosmwasm/blob/main/docs/MESSAGE_TYPES.md>. - -| type | -| ----------- | -| **string**. | - -### objects - -ObjectsResponse is the response of the Objects query. - -| property | description | -| ------------------------- | ----------------------------------------------------------------------------------------------------- | -| `data` | _(Required.) _ **Array<[ObjectResponse](#objectresponse)>**. The list of objects in the bucket. | -| `page_info` | _(Required.) _ **[PageInfo](#pageinfo)**. The page information. | -| `page_info.cursor` | **string**. The cursor to the next page. 
| -| `page_info.has_next_page` | **boolean**. Tells if there is a next page. | - -### objects_pinned_by - -ObjectsResponse is the response of the Objects query. - -| property | description | -| ------------------------- | ----------------------------------------------------------------------------------------------------- | -| `data` | _(Required.) _ **Array<[ObjectResponse](#objectresponse)>**. The list of objects in the bucket. | -| `page_info` | _(Required.) _ **[PageInfo](#pageinfo)**. The page information. | -| `page_info.cursor` | **string**. The cursor to the next page. | -| `page_info.has_next_page` | **boolean**. Tells if there is a next page. | - -### ownership - -The contract's ownership info - -| property | description | -| ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `owner` | **string\|null**. The contract's current owner. `None` if the ownership has been renounced. | -| `pending_expiry` | **[Expiration](#expiration)\|null**. The deadline for the pending owner to accept the ownership. `None` if there isn't a pending ownership transfer, or if a transfer exists and it doesn't have a deadline. | -| `pending_owner` | **string\|null**. The account who has been proposed to take over the ownership. `None` if there isn't a pending ownership transfer. | - -### pins_for_object - -PinsForObjectResponse is the response of the GetObjectPins query. - -| property | description | -| ------------------------- | ------------------------------------------------------------------------------------- | -| `data` | _(Required.) _ **Array<string>**. The list of addresses that pinned the object. | -| `page_info` | _(Required.) _ **[PageInfo](#pageinfo)**. The page information. | -| `page_info.cursor` | **string**. The cursor to the next page. | -| `page_info.has_next_page` | **boolean**. 
Tells if there is a next page. | - -## Definitions - -### Action - -Actions that can be taken to alter the contract's ownership - -| variant | description | -| --------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| undefined | **object**. Propose to transfer the contract's ownership to another account, optionally with an expiry time.

Can only be called by the contract's current owner.

Any existing pending ownership transfer is overwritten. | -| undefined | **string**: `accept_ownership`. Accept the pending ownership transfer.

Can only be called by the pending owner. | -| undefined | **string**: `renounce_ownership`. Give up the contract's ownership and the possibility of appointing a new owner.

Can only be invoked by the contract's current owner.

Any existing pending ownership transfer is canceled. | - -### Binary - -A string containing Base64-encoded data. - -| type | -| ----------- | -| **string**. | - -### BucketConfig - -BucketConfig is the type of the configuration of a bucket. - -The configuration is set at the instantiation of the bucket, and is immutable and cannot be changed. The configuration is optional and if not set, the default configuration is used. - -| property | description | -| ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `compression_algorithm` | **[CompressionAlgorithm](#compressionalgorithm)**. The compression algorithm used for all objects in the bucket. All objects stored in the bucket will use this compression algorithm. The default algorithm is Passthrough if not set. | -| `hash_algorithm` | **[HashAlgorithm](#hashalgorithm)**. The algorithm used to hash the content of the objects to generate the id of the objects. The algorithm is optional and if not set, the default algorithm is used.

The default algorithm is Sha256 if not set. | - -### BucketLimits - -BucketLimits is the type of the limits of a bucket. - -The limits are optional and if not set, there is no limit. - -| property | description | -| ----------------- | -------------------------------------------------------------------------------------- | -| `max_object_pins` | **[Uint128](#uint128)\|null**. The maximum number of pins in the bucket for an object. | -| `max_object_size` | **[Uint128](#uint128)\|null**. The maximum size of the objects in the bucket. | -| `max_objects` | **[Uint128](#uint128)\|null**. The maximum number of objects in the bucket. | -| `max_total_size` | **[Uint128](#uint128)\|null**. The maximum total size of the objects in the bucket. | - -### BucketStat - -BucketStat is the type of the statistics of a bucket. - -| property | description | -| ----------------- | ---------------------------------------------------------------------------------------------------------------- | -| `compressed_size` | _(Required.) _ **[Uint128](#uint128)**. The total size of the objects contained in the bucket after compression. | -| `object_count` | _(Required.) _ **[Uint128](#uint128)**. The number of objects in the bucket. | -| `size` | _(Required.) _ **[Uint128](#uint128)**. The total size of the objects contained in the bucket. | - -### CompressionAlgorithm - -CompressionAlgorithm is an enumeration that defines the different compression algorithms supported for compressing the content of objects. The compression algorithm specified here are relevant algorithms for compressing data on-chain, which means that they are fast to compress and decompress, and have a low computational cost. - -The order of the compression algorithms is based on their estimated computational cost (quite opinionated) during both compression and decompression, ranging from the lowest to the highest. This particular order is utilized to establish the default compression algorithm for storing an object. 
- -| variant | description | -| --------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [Passthrough](#passthrough) | **string**: `passthrough`. Represents no compression algorithm. The object is stored as is without any compression. | -| [Snappy](#snappy) | **string**: `snappy`. Represents the Snappy algorithm. Snappy is a compression/decompression algorithm that does not aim for maximum compression. Instead, it aims for very high speeds and reasonable compression.

See [the snappy web page](https://google.github.io/snappy/) for more information. | -| [Lzma](#lzma) | **string**: `lzma`. Represents the LZMA algorithm. LZMA is a lossless data compression/decompression algorithm that features a high compression ratio and a variable compression-dictionary size up to 4 GB.

See [the LZMA wiki page](https://en.wikipedia.org/wiki/Lempel%E2%80%93Ziv%E2%80%93Markov_chain_algorithm) for more information. | - -### Expiration - -Expiration represents a point in time when some event happens. It can compare with a BlockInfo and will return is_expired() == true once the condition is hit (and for every block in the future) - -| variant | description | -| --------- | ---------------------------------------------------------------------- | -| undefined | **object**. AtHeight will expire when `env.block.height` >= height | -| undefined | **object**. AtTime will expire when `env.block.time` >= time | -| undefined | **object**. Never will never expire. Used to express the empty variant | - -### HashAlgorithm - -HashAlgorithm is an enumeration that defines the different hash algorithms supported for hashing the content of objects. - -| variant | description | -| ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [MD5](#md5) | **string**: `m_d5`. Represents the MD5 algorithm. MD5 is a widely used cryptographic hash function that produces a 128-bit hash value. The computational cost of MD5 is relatively low compared to other hash functions, but its short hash length makes it easier to find hash collisions. It is now considered insecure for cryptographic purposes, but can still used in non-security contexts.

MD5 hashes are stored on-chain as 32 hexadecimal characters.

See [the MD5 Wikipedia page](https://en.wikipedia.org/wiki/MD5) for more information. | -| [Sha224](#sha224) | **string**: `sha224`. Represents the SHA-224 algorithm. SHA-224 is a variant of the SHA-2 family of hash functions that produces a 224-bit hash value. It is similar to SHA-256, but with a shorter output size. The computational cost of SHA-224 is moderate, and its relatively short hash length makes it easier to store and transmit.

SHA-224 hashes are stored on-chain as 56 hexadecimal characters.

See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. | -| [SHA256](#sha256) | **string**: `sha256`. Represents the SHA-256 algorithm. SHA-256 is a member of the SHA-2 family of hash functions that produces a 256-bit hash value. It is widely used in cryptography and other security-related applications. The computational cost of SHA-256 is moderate, and its hash length strikes a good balance between security and convenience.

SHA-256 hashes are stored on-chain as 64 hexadecimal characters.

See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. | -| [SHA384](#sha384) | **string**: `sha384`. Represents the SHA-384 algorithm. SHA-384 is a variant of the SHA-2 family of hash functions that produces a 384-bit hash value. It is similar to SHA-512, but with a shorter output size. The computational cost of SHA-384 is relatively high, but its longer hash length provides better security against hash collisions.

SHA-384 hashes are stored on-chain as 96 hexadecimal characters.

See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. | -| [SHA512](#sha512) | **string**: `sha512`. Represents the SHA-512 algorithm. SHA-512 is a member of the SHA-2 family of hash functions that produces a 512-bit hash value. It is widely used in cryptography and other security-related applications. The computational cost of SHA-512 is relatively high, but its longer hash length provides better security against hash collisions.

SHA-512 hashes are stored on-chain as 128 hexadecimal characters.

See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. | - -### Lzma - -Represents the LZMA algorithm. LZMA is a lossless data compression/decompression algorithm that features a high compression ratio and a variable compression-dictionary size up to 4 GB. - -See [the LZMA wiki page](https://en.wikipedia.org/wiki/Lempel%E2%80%93Ziv%E2%80%93Markov_chain_algorithm) for more information. - -| literal | -| -------- | -| `"lzma"` | - -### MD5 - -Represents the MD5 algorithm. MD5 is a widely used cryptographic hash function that produces a 128-bit hash value. The computational cost of MD5 is relatively low compared to other hash functions, but its short hash length makes it easier to find hash collisions. It is now considered insecure for cryptographic purposes, but can still used in non-security contexts. - -MD5 hashes are stored on-chain as 32 hexadecimal characters. - -See [the MD5 Wikipedia page](https://en.wikipedia.org/wiki/MD5) for more information. - -| literal | -| -------- | -| `"m_d5"` | - -### ObjectResponse - -ObjectResponse is the response of the Object query. - -| property | description | -| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | -| `compressed_size` | _(Required.) _ **[Uint128](#uint128)**. The size of the object when compressed. If the object is not compressed, the value is the same as `size`. | -| `id` | _(Required.) _ **string**. The id of the object. | -| `is_pinned` | _(Required.) _ **boolean**. Tells if the object is pinned by at least one address. | -| `size` | _(Required.) _ **[Uint128](#uint128)**. The size of the object. | - -### PageInfo - -PageInfo is the page information returned for paginated queries. - -| property | description | -| --------------- | ---------------------------------------------------------- | -| `cursor` | _(Required.) _ **string**. The cursor to the next page. 
| -| `has_next_page` | _(Required.) _ **boolean**. Tells if there is a next page. | - -### PaginationConfig - -PaginationConfig is the type carrying configuration for paginated queries. - -The fields are optional and if not set, there is a default configuration. - -| property | description | -| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ | -| `default_page_size` | **integer**. The default number of elements in a page.

Shall be less or equal than `max_page_size`. Default to '10' if not set. | -| `max_page_size` | **integer**. The maximum elements a page can contain.

Shall be less than `u32::MAX - 1`. Default to '30' if not set. | - -### Passthrough - -Represents no compression algorithm. The object is stored as is without any compression. - -| literal | -| --------------- | -| `"passthrough"` | - -### SHA256 - -Represents the SHA-256 algorithm. SHA-256 is a member of the SHA-2 family of hash functions that produces a 256-bit hash value. It is widely used in cryptography and other security-related applications. The computational cost of SHA-256 is moderate, and its hash length strikes a good balance between security and convenience. - -SHA-256 hashes are stored on-chain as 64 hexadecimal characters. - -See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - -| literal | -| ---------- | -| `"sha256"` | - -### SHA384 - -Represents the SHA-384 algorithm. SHA-384 is a variant of the SHA-2 family of hash functions that produces a 384-bit hash value. It is similar to SHA-512, but with a shorter output size. The computational cost of SHA-384 is relatively high, but its longer hash length provides better security against hash collisions. - -SHA-384 hashes are stored on-chain as 96 hexadecimal characters. - -See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - -| literal | -| ---------- | -| `"sha384"` | - -### SHA512 - -Represents the SHA-512 algorithm. SHA-512 is a member of the SHA-2 family of hash functions that produces a 512-bit hash value. It is widely used in cryptography and other security-related applications. The computational cost of SHA-512 is relatively high, but its longer hash length provides better security against hash collisions. - -SHA-512 hashes are stored on-chain as 128 hexadecimal characters. - -See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - -| literal | -| ---------- | -| `"sha512"` | - -### Sha224 - -Represents the SHA-224 algorithm. 
SHA-224 is a variant of the SHA-2 family of hash functions that produces a 224-bit hash value. It is similar to SHA-256, but with a shorter output size. The computational cost of SHA-224 is moderate, and its relatively short hash length makes it easier to store and transmit. - -SHA-224 hashes are stored on-chain as 56 hexadecimal characters. - -See [the SHA-2 Wikipedia page](https://en.wikipedia.org/wiki/SHA-2) for more information. - -| literal | -| ---------- | -| `"sha224"` | - -### Snappy - -Represents the Snappy algorithm. Snappy is a compression/decompression algorithm that does not aim for maximum compression. Instead, it aims for very high speeds and reasonable compression. - -See [the snappy web page](https://google.github.io/snappy/) for more information. - -| literal | -| ---------- | -| `"snappy"` | - -### Timestamp - -A point in time in nanosecond precision. - -This type can represent times from 1970-01-01T00:00:00Z to 2554-07-21T23:34:33Z. - -## Examples - -````# use cosmwasm_std::Timestamp; let ts = Timestamp::from_nanos(1_000_000_202); assert_eq!(ts.nanos(), 1_000_000_202); assert_eq!(ts.seconds(), 1); assert_eq!(ts.subsec_nanos(), 202); - -let ts = ts.plus_seconds(2); assert_eq!(ts.nanos(), 3_000_000_202); assert_eq!(ts.seconds(), 3); assert_eq!(ts.subsec_nanos(), 202); ``` - - - -### Uint128 - -A string containing a 128-bit integer in decimal representation. - -|type| -|----| -|**string**.| - -### Uint64 - -A thin wrapper around u64 that is using strings for JSON encoding/decoding, such that the full u64 range can be used for clients that convert JSON numbers to floats, like JavaScript and jq. 
- -# Examples - -Use `from` to create instances of this and `u64` to get the value out: - -``` # use cosmwasm_std::Uint64; let a = Uint64::from(42u64); assert_eq!(a.u64(), 42); - -let b = Uint64::from(70u32); assert_eq!(b.u64(), 70); ``` - -|type| -|----| -|**string**.| - -### undefined - -Propose to transfer the contract's ownership to another account, optionally with an expiry time. - -Can only be called by the contract's current owner. - -Any existing pending ownership transfer is overwritten. - -|property|description| -|----------|-----------| -|`transfer_ownership`|*(Required.) * **object**. | -|`transfer_ownership.expiry`|**[Expiration](#expiration)\|null**. | -|`transfer_ownership.new_owner`|*(Required.) * **string**. | - ---- - -*Rendered by [Fadroma](https://fadroma.tech) ([@fadroma/schema 1.1.0](https://www.npmjs.com/package/@fadroma/schema)) from `axone-objectarium.json` (`9a941ffdcb12ddd9`)* -```` diff --git a/packages/axone-cognitarium-client/README.md b/packages/axone-cognitarium-client/README.md deleted file mode 100644 index 5823eee6..00000000 --- a/packages/axone-cognitarium-client/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Cognitarium client - -Package that holds components to interact with the `axone-cognitarium` contract. 
diff --git a/packages/axone-cognitarium-client/src/client.rs b/packages/axone-cognitarium-client/src/client.rs deleted file mode 100644 index 374ff942..00000000 --- a/packages/axone-cognitarium-client/src/client.rs +++ /dev/null @@ -1,60 +0,0 @@ -use axone_cognitarium::msg::{DataFormat, ExecuteMsg, QueryMsg, SelectResponse}; -use axone_cognitarium::parser::SelectQuery; -use cosmwasm_std::{ - to_json_binary, Addr, Binary, Coin, CustomQuery, QuerierWrapper, QueryRequest, StdResult, - WasmMsg, WasmQuery, -}; -use serde::de::DeserializeOwned; -use serde::Serialize; - -pub struct CognitariumClient { - address: Addr, -} - -impl CognitariumClient { - pub fn new(address: Addr) -> Self { - Self { address } - } - - pub fn select( - &self, - querier: QuerierWrapper<'_, C>, - query: SelectQuery, - ) -> StdResult { - self.query_wasm(querier, &QueryMsg::Select { query }) - } - - pub fn insert_data(&self, format: Option, data: Binary) -> StdResult { - self.to_wasm_exec_msg(&ExecuteMsg::InsertData { format, data }, vec![]) - } - - fn query_wasm(&self, querier: QuerierWrapper<'_, C>, msg: &T) -> StdResult - where - C: CustomQuery, - T: Serialize + ?Sized, - U: DeserializeOwned, - { - querier.query(&QueryRequest::Wasm(self.to_wasm_query_msg(msg)?)) - } - - fn to_wasm_exec_msg(&self, msg: &T, funds: Vec) -> StdResult - where - T: Serialize + ?Sized, - { - Ok(WasmMsg::Execute { - contract_addr: self.address.to_string(), - msg: to_json_binary(msg)?, - funds, - }) - } - - fn to_wasm_query_msg(&self, msg: &T) -> StdResult - where - T: Serialize + ?Sized, - { - Ok(WasmQuery::Smart { - contract_addr: self.address.to_string(), - msg: to_json_binary(msg)?, - }) - } -} diff --git a/packages/axone-cognitarium-client/src/lib.rs b/packages/axone-cognitarium-client/src/lib.rs deleted file mode 100644 index be50984f..00000000 --- a/packages/axone-cognitarium-client/src/lib.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod client; - -pub use client::*; diff --git a/packages/axone-cognitarium-client/Cargo.toml 
b/packages/axone-dummy-lib/Cargo.toml similarity index 52% rename from packages/axone-cognitarium-client/Cargo.toml rename to packages/axone-dummy-lib/Cargo.toml index 51da34c7..7b7e9b5a 100644 --- a/packages/axone-cognitarium-client/Cargo.toml +++ b/packages/axone-dummy-lib/Cargo.toml @@ -1,15 +1,11 @@ [package] authors = { workspace = true } +description = "Placeholder shared logic kept during refactor." edition = { workspace = true } -name = "axone-cognitarium-client" - -description = "A client library for the Axone Cognitarium Smart Contract." homepage = { workspace = true } license = { workspace = true } +name = "axone-dummy-lib" repository = { workspace = true } version = { workspace = true } [dependencies] -axone-cognitarium.workspace = true -cosmwasm-std.workspace = true -serde.workspace = true diff --git a/packages/axone-cognitarium-client/Makefile.toml b/packages/axone-dummy-lib/Makefile.toml similarity index 100% rename from packages/axone-cognitarium-client/Makefile.toml rename to packages/axone-dummy-lib/Makefile.toml diff --git a/packages/axone-dummy-lib/README.md b/packages/axone-dummy-lib/README.md new file mode 100644 index 00000000..25ddbce5 --- /dev/null +++ b/packages/axone-dummy-lib/README.md @@ -0,0 +1,3 @@ +# Axone Dummy Library + +This is a simple dummy library kept as a placeholder during the refactoring of the AXONE protocol contracts. It does not implement any real functionality. 
diff --git a/packages/axone-dummy-lib/src/lib.rs b/packages/axone-dummy-lib/src/lib.rs new file mode 100644 index 00000000..098cf36c --- /dev/null +++ b/packages/axone-dummy-lib/src/lib.rs @@ -0,0 +1,3 @@ +pub fn greeting() -> &'static str { + "hello from axone-dummy-lib" +} diff --git a/packages/axone-logic-bindings/Cargo.toml b/packages/axone-logic-bindings/Cargo.toml deleted file mode 100644 index 092d73bd..00000000 --- a/packages/axone-logic-bindings/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -authors = { workspace = true } -edition = { workspace = true } -name = "axone-logic-bindings" - -description = "A library defining the bindings for querying the AXONE logic module." -homepage = { workspace = true } -license = { workspace = true } -repository = { workspace = true } -version = { workspace = true } - -[dependencies] -cosmwasm-std.workspace = true -schemars.workspace = true -serde.workspace = true -thiserror.workspace = true diff --git a/packages/axone-logic-bindings/Makefile.toml b/packages/axone-logic-bindings/Makefile.toml deleted file mode 100644 index 2d9b5d19..00000000 --- a/packages/axone-logic-bindings/Makefile.toml +++ /dev/null @@ -1 +0,0 @@ -[tasks.schema] diff --git a/packages/axone-logic-bindings/README.md b/packages/axone-logic-bindings/README.md deleted file mode 100644 index d27d6a02..00000000 --- a/packages/axone-logic-bindings/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Logic bindings - -Package that holds all bindings for querying the AXONE logic module. diff --git a/packages/axone-logic-bindings/src/atom.rs b/packages/axone-logic-bindings/src/atom.rs deleted file mode 100644 index 709e772f..00000000 --- a/packages/axone-logic-bindings/src/atom.rs +++ /dev/null @@ -1,42 +0,0 @@ -/// Convert a Rust string to a Prolog atom. 
-pub fn as_prolog_atom(s: &str) -> String { - let mut escaped = String::with_capacity(s.len() + 2); - escaped.push('\''); - for c in s.chars() { - if c == '\'' { - escaped.push('\\'); - escaped.push(c); - } else { - escaped.push(c); - } - } - escaped.push('\''); - - escaped -} - -#[cfg(test)] -mod tests { - use super::as_prolog_atom; - - #[test] - fn test_as_prolog_atom() { - let test_cases = vec![ - ("empty string", "", "''"), - ("simple case", "hello", "'hello'"), - ("space in the string", "hello world", "'hello world'"), - ("single quote in the middle", "foo'bar", "'foo\\'bar'"), - ("enclosed single quotes", "'foo bar'", "'\\'foo bar\\''"), - ("cosmwasm URI", "cosmwasm:name:address?query=%7B%22object_data%22%3A%7B%22id%22%3A%221a88ca1632c7323c0aa594000cda26ed9f48b36351c29c3d1e35e0a0474e862e%22%7D%7D", "'cosmwasm:name:address?query=%7B%22object_data%22%3A%7B%22id%22%3A%221a88ca1632c7323c0aa594000cda26ed9f48b36351c29c3d1e35e0a0474e862e%22%7D%7D'") - ]; - - for (_, input, expected) in test_cases { - let actual = as_prolog_atom(input); - assert_eq!( - actual, expected, - "as_prolog_atom({:?}) should produce {:?}, but got {:?}", - input, expected, actual - ); - } - } -} diff --git a/packages/axone-logic-bindings/src/error.rs b/packages/axone-logic-bindings/src/error.rs deleted file mode 100644 index e64f4e28..00000000 --- a/packages/axone-logic-bindings/src/error.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::string::FromUtf8Error; -use thiserror::Error; - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum TermParseError { - #[error("Value is not UTF-8 encoded: {0}")] - NotUtf8Value(FromUtf8Error), - - #[error("Reach unexpected EOF")] - Eof, - - #[error("Expected ',' or end of sequence and got: '{0}'")] - ExpectedSeqToken(char), - - #[error("Unexpected end of array or tuple")] - UnexpectedEndOfSeq, - - #[error("Forbidden token in value: '{0}'")] - UnexpectedValueToken(char), - - #[error("Unexpected root token: '{0}'")] - UnexpectedRootToken(char), - - #[error("Empty 
value in array or tuple")] - EmptyValue, - - #[error("Empty tuple")] - EmptyTuple, -} diff --git a/packages/axone-logic-bindings/src/lib.rs b/packages/axone-logic-bindings/src/lib.rs deleted file mode 100644 index 566710ab..00000000 --- a/packages/axone-logic-bindings/src/lib.rs +++ /dev/null @@ -1,13 +0,0 @@ -pub mod error; -mod query; -mod term_parser; - -pub use atom::as_prolog_atom; -pub use query::{Answer, AskResponse, LogicCustomQuery, Result, Substitution}; -pub use term_parser::TermValue; - -// Exposed for testing only -// Both unit tests and integration tests are compiled to native code, so everything in here does not need to compile to Wasm. -mod atom; -#[cfg(not(target_arch = "wasm32"))] -pub mod testing; diff --git a/packages/axone-logic-bindings/src/query.rs b/packages/axone-logic-bindings/src/query.rs deleted file mode 100644 index 2b5f6dca..00000000 --- a/packages/axone-logic-bindings/src/query.rs +++ /dev/null @@ -1,81 +0,0 @@ -use crate::error::TermParseError; -use crate::term_parser::{from_str, TermValue}; -use cosmwasm_std::CustomQuery; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum LogicCustomQuery { - Ask { program: String, query: String }, -} - -impl CustomQuery for LogicCustomQuery {} - -#[derive(Clone, Debug, Default, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "snake_case")] -pub struct AskResponse { - pub height: u64, - pub gas_used: u64, - pub answer: Option, - pub user_output: Option, -} - -#[derive(Clone, Debug, Default, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "snake_case")] -pub struct Answer { - pub has_more: bool, - pub variables: Vec, - pub results: Vec, -} - -impl Answer { - /// Create a new Answer with an error message. 
- pub fn from_error(error: String) -> Self { - Self { - has_more: false, - variables: vec![], - results: vec![Result { - error: Some(error), - substitutions: vec![], - }], - } - } -} - -#[derive(Clone, Debug, Default, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "snake_case")] -pub struct Result { - pub error: Option, - pub substitutions: Vec, -} - -#[derive(Clone, Debug, Default, Deserialize, Eq, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "snake_case")] -pub struct Substitution { - pub variable: String, - pub expression: String, -} - -impl Substitution { - pub fn parse_expression(self) -> std::result::Result { - from_str(self.expression.as_str()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn term_parse() { - assert_eq!( - Substitution { - variable: "X".to_string(), - expression: "'hello'".to_string(), - } - .parse_expression(), - Ok(TermValue::Value("hello".to_string())) - ); - } -} diff --git a/packages/axone-logic-bindings/src/term_parser.rs b/packages/axone-logic-bindings/src/term_parser.rs deleted file mode 100644 index 8002c275..00000000 --- a/packages/axone-logic-bindings/src/term_parser.rs +++ /dev/null @@ -1,398 +0,0 @@ -use crate::error::TermParseError; - -/// Represents a Prolog response term element which can be a tuple, an array or a string value. 
-#[derive(Clone, Debug, Eq, PartialEq)] -pub enum TermValue { - Tuple(Vec), - Array(Vec), - Value(String), -} - -struct Parser<'a> { - slice: &'a [u8], - index: usize, -} - -impl<'a> Parser<'a> { - pub fn new(slice: &'a [u8]) -> Parser<'a> { - Parser { slice, index: 0 } - } - - fn peek(&mut self) -> Option { - self.slice.get(self.index).cloned() - } - - fn eat_char(&mut self) { - self.index += 1; - } - - fn parse_seq(&mut self, end_seq: u8) -> Result, TermParseError> { - let mut values: Vec = Vec::new(); - loop { - values.push(match self.peek() { - None => Err(TermParseError::Eof), - Some(t) if t == end_seq => { - if !values.is_empty() { - return Err(TermParseError::UnexpectedEndOfSeq); - } - self.eat_char(); - break; - } - Some(b'[') => { - self.eat_char(); - self.parse_array() - } - Some(b'(') => { - self.eat_char(); - self.parse_tuple() - } - Some(b'\'') => { - self.eat_char(); - self.parse_escaped_value() - } - Some(_) => self.parse_value(), - }?); - - match self.peek() { - Some(t) if t == end_seq => { - self.eat_char(); - break; - } - Some(b',') => { - self.eat_char(); - } - Some(t) => Err(TermParseError::ExpectedSeqToken(char::from(t)))?, - None => Err(TermParseError::Eof)?, - } - } - Ok(values) - } - - fn parse_array(&mut self) -> Result { - self.parse_seq(b']').map(TermValue::Array) - } - - fn parse_tuple(&mut self) -> Result { - self.parse_seq(b')') - .and_then(|elem: Vec| { - if elem.is_empty() { - return Err(TermParseError::EmptyTuple); - } - Ok(elem) - }) - .map(TermValue::Tuple) - } - - fn parse_value(&mut self) -> Result { - let mut bytes: Vec = Vec::new(); - loop { - match self.peek() { - Some(t) if [b'[', b'(', b'\'', b'"', b' '].contains(&t) => { - Err(TermParseError::UnexpectedValueToken(char::from(t)))?; - } - Some(b) if ![b']', b')', b','].contains(&b) => { - self.eat_char(); - bytes.push(b); - } - _ => break, - } - } - - if bytes.is_empty() { - return Err(TermParseError::EmptyValue); - } - - String::from_utf8(bytes) - 
.map_err(TermParseError::NotUtf8Value) - .map(TermValue::Value) - } - - fn parse_escaped_value(&mut self) -> Result { - let mut bytes: Vec = Vec::new(); - loop { - match self.peek() { - Some(b'\'') => { - self.eat_char(); - break; - } - Some(b'\\') => { - self.eat_char(); - match self.peek() { - Some(b'\'') => { - self.eat_char(); - bytes.push(b'\''); - } - _ => { - bytes.push(b'\\'); - } - } - } - Some(b) => { - self.eat_char(); - bytes.push(b); - } - None => Err(TermParseError::Eof)?, - } - } - - String::from_utf8(bytes) - .map_err(TermParseError::NotUtf8Value) - .map(TermValue::Value) - } - - fn parse(&mut self) -> Result { - let mut values: Vec = Vec::new(); - loop { - match self.peek() { - Some(b'[') => { - self.eat_char(); - values.push(self.parse_array()?); - } - Some(b'(') => { - self.eat_char(); - values.push(self.parse_tuple()?); - } - Some(b'\'') => { - self.eat_char(); - values.push(self.parse_escaped_value()?); - } - Some(_) => { - values.push(self.parse_value()?); - } - _ => {} - } - - match self.peek() { - Some(b',') => { - self.eat_char(); - } - None => { - break; - } - Some(t) => Err(TermParseError::UnexpectedRootToken(char::from(t)))?, - } - } - - if values.is_empty() { - return Ok(TermValue::Value(String::default())); - } - - if values.len() == 1 { - return Ok(values[0].clone()); - } - - Ok(TermValue::Tuple(values)) - } -} - -/// Parses a Prolog response term from bytes -pub fn from_slice(v: &[u8]) -> Result { - let mut parser = Parser::new(v); - let value = parser.parse()?; - - Ok(value) -} - -/// Parses a Prolog response term from a string -pub fn from_str(s: &str) -> Result { - from_slice(s.as_bytes()) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parse_value() { - let cases = vec![ - ("hello", Ok(TermValue::Value("hello".to_string()))), - ("47", Ok(TermValue::Value("47".to_string()))), - ("25.18", Ok(TermValue::Value("25.18".to_string()))), - ("78/foo", Ok(TermValue::Value("78/foo".to_string()))), - ("", 
Err(TermParseError::EmptyValue)), - ("]", Err(TermParseError::EmptyValue)), - (")", Err(TermParseError::EmptyValue)), - (",", Err(TermParseError::EmptyValue)), - ("foo[", Err(TermParseError::UnexpectedValueToken('['))), - ("foo(", Err(TermParseError::UnexpectedValueToken('('))), - ("foo'", Err(TermParseError::UnexpectedValueToken('\''))), - ("foo\"", Err(TermParseError::UnexpectedValueToken('"'))), - ("foo ", Err(TermParseError::UnexpectedValueToken(' '))), - ]; - - for case in cases { - let res = Parser { - slice: case.0.as_bytes(), - index: 0, - } - .parse_value(); - assert_eq!(res, case.1); - } - - let res = Parser { - slice: &[255u8], - index: 0, - } - .parse_value(); - assert!(res.is_err()); - matches!(res.err().unwrap(), TermParseError::NotUtf8Value(_)); - } - - #[test] - fn parse_escaped_value() { - let cases = vec![ - ("hello'", Ok(TermValue::Value("hello".to_string()))), - ( - "47.18/\\foo&é#@'", - Ok(TermValue::Value("47.18/\\foo&é#@".to_string())), - ), - ( - "can \\' quote'", - Ok(TermValue::Value("can ' quote".to_string())), - ), - ( - " a '", - Ok(TermValue::Value(" a ".to_string())), - ), - ( - "and an emoji 👌'", - Ok(TermValue::Value("and an emoji 👌".to_string())), - ), - ("eof", Err(TermParseError::Eof)), - ]; - - for case in cases { - let res = Parser { - slice: case.0.as_bytes(), - index: 0, - } - .parse_escaped_value(); - assert_eq!(res, case.1); - } - - let res = Parser { - slice: &[255u8, b'\''], - index: 0, - } - .parse_escaped_value(); - assert!(res.is_err()); - matches!(res.err().unwrap(), TermParseError::NotUtf8Value(_)); - } - - #[test] - fn parse_array() { - let cases = vec![ - ("[]", Ok(TermValue::Array(vec![]))), - ("[,]", Err(TermParseError::EmptyValue)), - ("[[]", Err(TermParseError::Eof)), - ("[", Err(TermParseError::Eof)), - ( - "[hello]", - Ok(TermValue::Array(vec![TermValue::Value( - "hello".to_string(), - )])), - ), - ("['hello'oups]", Err(TermParseError::ExpectedSeqToken('o'))), - ( - "[hello,'cosmonaut']", - 
Ok(TermValue::Array(vec![ - TermValue::Value("hello".to_string()), - TermValue::Value("cosmonaut".to_string()), - ])), - ), - ( - "[hello,'cosmonaut',]", - Err(TermParseError::UnexpectedEndOfSeq), - ), - ( - "[hello, 'cosmonaut']", - Err(TermParseError::UnexpectedValueToken(' ')), - ), - ( - "[[],[[]],['that\\'s a lot!']]", - Ok(TermValue::Array(vec![ - TermValue::Array(vec![]), - TermValue::Array(vec![TermValue::Array(vec![])]), - TermValue::Array(vec![TermValue::Value("that's a lot!".to_string())]), - ])), - ), - ]; - - for case in cases { - let res = from_str(case.0); - assert_eq!(res, case.1); - } - } - - #[test] - fn parse_tuple() { - let cases = vec![ - ( - "(1,2)", - Ok(TermValue::Tuple(vec![ - TermValue::Value("1".to_string()), - TermValue::Value("2".to_string()), - ])), - ), - ("()", Err(TermParseError::EmptyTuple)), - ("(,)", Err(TermParseError::EmptyValue)), - ("((1,2)", Err(TermParseError::Eof)), - ("(", Err(TermParseError::Eof)), - ( - "(((1,2),(1,2,3)),('that\\'s',' a lot!'))", - Ok(TermValue::Tuple(vec![ - TermValue::Tuple(vec![ - TermValue::Tuple(vec![ - TermValue::Value("1".to_string()), - TermValue::Value("2".to_string()), - ]), - TermValue::Tuple(vec![ - TermValue::Value("1".to_string()), - TermValue::Value("2".to_string()), - TermValue::Value("3".to_string()), - ]), - ]), - TermValue::Tuple(vec![ - TermValue::Value("that's".to_string()), - TermValue::Value(" a lot!".to_string()), - ]), - ])), - ), - ]; - - for case in cases { - let res = from_str(case.0); - assert_eq!(res, case.1); - } - } - - #[test] - fn parse() { - let cases = vec![ - ("", Ok(TermValue::Value("".to_string()))), - ("hello", Ok(TermValue::Value("hello".to_string()))), - ("'hello'", Ok(TermValue::Value("hello".to_string()))), - ("(1,2))", Err(TermParseError::UnexpectedRootToken(')'))), - ("[]]", Err(TermParseError::UnexpectedRootToken(']'))), - ( - "[hello],([[],'an \\' escape'],'an emoji 👌'),[cosmos]", - Ok(TermValue::Tuple(vec![ - 
TermValue::Array(vec![TermValue::Value("hello".to_string())]), - TermValue::Tuple(vec![ - TermValue::Array(vec![ - TermValue::Array(vec![]), - TermValue::Value("an \' escape".to_string()), - ]), - TermValue::Value("an emoji 👌".to_string()), - ]), - TermValue::Array(vec![TermValue::Value("cosmos".to_string())]), - ])), - ), - ]; - - for case in cases { - let res = from_str(case.0); - assert_eq!(res, case.1); - } - } -} diff --git a/packages/axone-logic-bindings/src/testing/mock.rs b/packages/axone-logic-bindings/src/testing/mock.rs deleted file mode 100644 index 30241c8f..00000000 --- a/packages/axone-logic-bindings/src/testing/mock.rs +++ /dev/null @@ -1,50 +0,0 @@ -use crate::LogicCustomQuery; -use cosmwasm_std::testing::{MockApi, MockQuerier, MockStorage}; -use cosmwasm_std::{Coin, OwnedDeps, QuerierResult}; -use std::marker::PhantomData; - -pub fn mock_dependencies_with_logic_handler( - handler: LH, -) -> OwnedDeps, LogicCustomQuery> -where - LH: Fn(&LogicCustomQuery) -> QuerierResult + 'static, -{ - OwnedDeps { - storage: MockStorage::default(), - api: MockApi::default(), - querier: MockLogicQuerier::new(LogicQuerier::new(Box::new(handler)), &[]), - custom_query_type: PhantomData, - } -} - -trait MockLogicQuerier { - fn new(logic: LogicQuerier, balances: &[(&str, &[Coin])]) -> Self; -} - -impl MockLogicQuerier for MockQuerier { - fn new(logic: LogicQuerier, balances: &[(&str, &[Coin])]) -> Self { - MockQuerier::new(balances).with_custom_handler(Box::new(logic.handler)) - } -} - -struct LogicQuerier { - /// A handler to handle Logic queries. This is set to a dummy handler that - /// always return a successful foo / bar response by default. Update it via `update_handler`. - /// - /// Use box to avoid the need of generic type. 
- handler: Box Fn(&'a LogicCustomQuery) -> QuerierResult>, -} - -impl LogicQuerier { - fn new(handler: Box Fn(&'a LogicCustomQuery) -> QuerierResult>) -> Self { - Self { handler } - } - - #[allow(dead_code)] - fn update_handler(&mut self, handler: LH) - where - LH: Fn(&LogicCustomQuery) -> QuerierResult + 'static, - { - self.handler = Box::from(handler); - } -} diff --git a/packages/axone-logic-bindings/src/testing/mod.rs b/packages/axone-logic-bindings/src/testing/mod.rs deleted file mode 100644 index 8c39c80a..00000000 --- a/packages/axone-logic-bindings/src/testing/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -#![cfg(not(target_arch = "wasm32"))] - -// Exposed for testing only -// Both unit tests and integration tests are compiled to native code, so everything in here does not need to compile to Wasm. - -pub mod mock; diff --git a/packages/axone-objectarium-client/Cargo.toml b/packages/axone-objectarium-client/Cargo.toml deleted file mode 100644 index 30ca0093..00000000 --- a/packages/axone-objectarium-client/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -authors = { workspace = true } -edition = { workspace = true } -name = "axone-objectarium-client" - -description = "A client library for the Axone Objectarium Smart Contract." 
-homepage = { workspace = true } -license = { workspace = true } -repository = { workspace = true } -version = { workspace = true } - -[dependencies] -axone-objectarium.workspace = true -axone-wasm.workspace = true -cosmwasm-std.workspace = true -serde.workspace = true diff --git a/packages/axone-objectarium-client/Makefile.toml b/packages/axone-objectarium-client/Makefile.toml deleted file mode 100644 index 2d9b5d19..00000000 --- a/packages/axone-objectarium-client/Makefile.toml +++ /dev/null @@ -1 +0,0 @@ -[tasks.schema] diff --git a/packages/axone-objectarium-client/README.md b/packages/axone-objectarium-client/README.md deleted file mode 100644 index f6b4960a..00000000 --- a/packages/axone-objectarium-client/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Objectarium client - -Package that holds components to interact with the `axone-objectarium` contract. diff --git a/packages/axone-objectarium-client/src/lib.rs b/packages/axone-objectarium-client/src/lib.rs deleted file mode 100644 index 7f7d2ee6..00000000 --- a/packages/axone-objectarium-client/src/lib.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod object; - -pub use object::ObjectRef; diff --git a/packages/axone-objectarium-client/src/object.rs b/packages/axone-objectarium-client/src/object.rs deleted file mode 100644 index 77dbb4e1..00000000 --- a/packages/axone-objectarium-client/src/object.rs +++ /dev/null @@ -1,195 +0,0 @@ -use axone_objectarium::msg::QueryMsg::ObjectData; -use axone_objectarium::msg::{ExecuteMsg, QueryMsg}; -use axone_wasm::error::CosmwasmUriError; -use axone_wasm::uri::CosmwasmUri; -use cosmwasm_std::{to_json_binary, Coin, StdResult, WasmMsg}; -use serde::{Deserialize, Serialize}; - -const CONTRACT_NAME: &str = "axone-objectarium"; - -/// Represents a reference to an Object stored in the `axone-objectarium` contract. -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct ObjectRef { - /// The object id in the `axone-objectarium` contract. 
- pub object_id: String, - - /// The `axone-objectarium` contract address on which the object is stored. - pub storage_address: String, -} - -impl ObjectRef { - fn to_wasm_exec_msg(&self, msg: &T, funds: Vec) -> StdResult - where - T: Serialize + ?Sized, - { - Ok(WasmMsg::Execute { - contract_addr: self.storage_address.clone(), - msg: to_json_binary(msg)?, - funds, - }) - } - - pub fn to_exec_forget_msg(&self, funds: Vec) -> StdResult { - self.to_wasm_exec_msg( - &ExecuteMsg::ForgetObject { - id: self.object_id.clone(), - }, - funds, - ) - } - - pub fn to_exec_pin_msg(&self, funds: Vec) -> StdResult { - self.to_wasm_exec_msg( - &ExecuteMsg::PinObject { - id: self.object_id.clone(), - }, - funds, - ) - } - - pub fn to_exec_unpin_msg(&self, funds: Vec) -> StdResult { - self.to_wasm_exec_msg( - &ExecuteMsg::UnpinObject { - id: self.object_id.clone(), - }, - funds, - ) - } -} - -impl TryFrom for ObjectRef { - type Error = CosmwasmUriError; - - fn try_from(value: CosmwasmUri) -> Result { - let address = value.contract_address.clone(); - value - .into_query::() - .and_then(|query: QueryMsg| match query { - ObjectData { id: object_id } => Ok(ObjectRef { - storage_address: address, - object_id, - }), - _ => Err(CosmwasmUriError::Malformed( - "wrong query content".to_string(), - )), - }) - } -} - -impl TryFrom for CosmwasmUri { - type Error = CosmwasmUriError; - - fn try_from(value: ObjectRef) -> Result { - CosmwasmUri::try_new( - Some(CONTRACT_NAME.to_string()), - value.storage_address, - &ObjectData { - id: value.object_id, - }, - ) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use cosmwasm_std::from_json; - - #[test] - fn uri_to_object() { - let cases = vec![ - ( - CosmwasmUri { - contract_name: None, - contract_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv".to_string(), - raw_query: "{\"object_data\":{\"id\":\"4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05\"}}".to_string(), - }, - Ok(ObjectRef { - object_id: 
"4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05".to_string(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv".to_string(), - }), - ), - ( - CosmwasmUri { - contract_name: None, - contract_address: "address".to_string(), - raw_query: "{\"object\":{\"id\":\"myid\"}}".to_string(), - }, - Err(CosmwasmUriError::Malformed( - "wrong query content".to_string(), - )), - ), - ]; - - for case in cases { - let res = ObjectRef::try_from(case.0); - assert_eq!(res, case.1); - } - } - - #[test] - fn object_to_uri() { - let res = CosmwasmUri::try_from(ObjectRef { - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05" - .to_string(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }); - - assert!(res.is_ok()); - assert_eq!(res.unwrap().to_string(), "cosmwasm:axone-objectarium:axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv?query=%7B%22object_data%22%3A%7B%22id%22%3A%224cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05%22%7D%7D"); - } - - #[test] - fn object_to_wasm_msg() { - let funds = vec![Coin::new(100u128, "uknow")]; - let object = ObjectRef { - object_id: "4cbe36399aabfcc7158ee7a66cbfffa525bb0ceab33d1ff2cff08759fe0a9b05" - .to_string(), - storage_address: "axone1ffzp0xmjhwkltuxcvccl0z9tyfuu7txp5ke0tpkcjpzuq9fcj3pq85yqlv" - .to_string(), - }; - - type ToExecuteMsgFn = Box) -> StdResult>; - let cases: Vec<(ToExecuteMsgFn, ExecuteMsg)> = vec![ - ( - Box::from(|obj: ObjectRef, f| obj.to_exec_forget_msg(f)), - ExecuteMsg::ForgetObject { - id: object.object_id.clone(), - }, - ), - ( - Box::from(|obj: ObjectRef, f| obj.to_exec_pin_msg(f)), - ExecuteMsg::PinObject { - id: object.object_id.clone(), - }, - ), - ( - Box::from(|obj: ObjectRef, f| obj.to_exec_unpin_msg(f)), - ExecuteMsg::UnpinObject { - id: object.object_id.clone(), - }, - ), - ]; - - for case in cases { - let res = case.0(object.clone(), funds.clone()); - 
assert!(res.is_ok()); - - match res.unwrap() { - WasmMsg::Execute { - contract_addr: addr, - msg, - funds: f, - } => { - assert_eq!(addr, object.storage_address.clone()); - assert_eq!(f, funds); - let exec_res = from_json::(&msg); - assert!(exec_res.is_ok()); - assert_eq!(exec_res.unwrap(), case.1) - } - _ => panic!("Expected 'WasmMsg::Execute'"), - } - } - } -} diff --git a/packages/axone-rdf/Cargo.toml b/packages/axone-rdf/Cargo.toml deleted file mode 100644 index 4bd82b2f..00000000 --- a/packages/axone-rdf/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -authors = { workspace = true } -edition = { workspace = true } -name = "axone-rdf" - -description = "Utility library offering essential components for efficiently handling RDF data within the Axone protocol." -homepage = { workspace = true } -license = { workspace = true } -repository = { workspace = true } -version = { workspace = true } - -[dependencies] -base16ct = { version = "0.2.0", features = ["alloc"] } -cosmwasm-std.workspace = true -itertools = "0.14.0" -rio_api.workspace = true -rio_turtle.workspace = true -rio_xml.workspace = true -sha2 = "0.10.9" -thiserror.workspace = true diff --git a/packages/axone-rdf/Makefile.toml b/packages/axone-rdf/Makefile.toml deleted file mode 100644 index 2d9b5d19..00000000 --- a/packages/axone-rdf/Makefile.toml +++ /dev/null @@ -1 +0,0 @@ -[tasks.schema] diff --git a/packages/axone-rdf/README.md b/packages/axone-rdf/README.md deleted file mode 100644 index 7cb11c7a..00000000 --- a/packages/axone-rdf/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# RDF - -Package that holds useful components to manage with `RDF` data, typically reading / writing. 
diff --git a/packages/axone-rdf/src/dataset.rs b/packages/axone-rdf/src/dataset.rs deleted file mode 100644 index 3e3de61e..00000000 --- a/packages/axone-rdf/src/dataset.rs +++ /dev/null @@ -1,235 +0,0 @@ -use crate::owned_model::OwnedQuad; -use itertools::Itertools; -use rio_api::model::{GraphName, NamedNode, Quad, Subject, Term}; -use std::collections::HashSet; -use std::slice::Iter; - -#[derive(Clone, Debug, PartialEq)] -pub struct Dataset<'a> { - quads: Vec>, -} - -impl<'a> AsRef<[Quad<'a>]> for Dataset<'a> { - fn as_ref(&self) -> &[Quad<'a>] { - self.quads.as_slice() - } -} - -impl<'a> From<&'a [OwnedQuad]> for Dataset<'a> { - fn from(value: &'a [OwnedQuad]) -> Self { - let quads = value.iter().map(Quad::from).collect(); - Dataset::new(quads) - } -} - -impl<'a> Dataset<'a> { - pub fn new(quads: Vec>) -> Self { - Self { quads } - } - - pub fn iter(&self) -> Iter<'_, Quad<'a>> { - self.quads.iter() - } - - pub fn match_pattern( - &'a self, - s: Option>, - p: Option>, - o: Option>, - g: Option>>, - ) -> QuadPatternFilter<'a, Iter<'a, Quad<'a>>> { - self.iter().match_pattern((s, p, o, g).into()) - } - - pub fn skip_pattern( - &'a self, - s: Option>, - p: Option>, - o: Option>, - g: Option>>, - ) -> QuadPatternFilter<'a, Iter<'a, Quad<'a>>> { - self.iter().skip_pattern((s, p, o, g).into()) - } - - pub fn sub_graph(&'a self, subject: Subject<'a>) -> Dataset<'a> { - Self::new(Self::sub_graph_from_quads(self.as_ref(), HashSet::new(), subject).0) - } - - fn sub_graph_from_quads( - quads: &'a [Quad<'a>], - mut visited: HashSet>, - subject: Subject<'a>, - ) -> (Vec>, HashSet>) { - let mut sub_graph = vec![]; - for quad in quads - .iter() - .match_pattern((Some(subject), None, None, None).into()) - { - sub_graph.push(*quad); - - let maybe_node: Option> = match quad.object { - Term::NamedNode(n) => Some(n.into()), - Term::BlankNode(n) => Some(n.into()), - _ => None, - }; - - if let Some(s) = maybe_node.filter(|n| !visited.contains(n)) { - visited.insert(subject); - let 
(new_quads, new_visited) = Self::sub_graph_from_quads(quads, visited, s); - visited = new_visited; - sub_graph.extend(new_quads); - } - } - - (sub_graph, visited) - } -} - -#[derive(Clone, Copy)] -pub struct QuadPattern<'a> { - subject: Option>, - predicate: Option>, - object: Option>, - graph_name: Option>>, -} - -impl<'a> - From<( - Option>, - Option>, - Option>, - Option>>, - )> for QuadPattern<'a> -{ - fn from( - value: ( - Option>, - Option>, - Option>, - Option>>, - ), - ) -> Self { - Self { - subject: value.0, - predicate: value.1, - object: value.2, - graph_name: value.3, - } - } -} - -impl QuadPattern<'_> { - pub fn match_pattern<'a>(self, quad: &'a Quad<'a>) -> bool { - self.subject.map_or_else(|| true, |s| s == quad.subject) - && self.predicate.map_or_else(|| true, |p| p == quad.predicate) - && self.object.map_or_else(|| true, |o| o == quad.object) - && self - .graph_name - .map_or_else(|| true, |g| g == quad.graph_name) - } -} - -pub trait QuadIterator<'a>: Iterator> { - fn match_patterns(self, patterns: Vec>) -> QuadPatternFilter<'a, Self> - where - Self: Sized, - { - QuadPatternFilter::new(self, patterns, QuadPatternFilterKind::Match) - } - - fn skip_patterns(self, patterns: Vec>) -> QuadPatternFilter<'a, Self> - where - Self: Sized, - { - QuadPatternFilter::new(self, patterns, QuadPatternFilterKind::Skip) - } - - fn match_pattern(self, pattern: QuadPattern<'a>) -> QuadPatternFilter<'a, Self> - where - Self: Sized, - { - self.match_patterns(vec![pattern]) - } - - fn skip_pattern(self, pattern: QuadPattern<'a>) -> QuadPatternFilter<'a, Self> - where - Self: Sized, - { - self.skip_patterns(vec![pattern]) - } - - fn subjects(self) -> Box> + 'a> - where - Self: Sized + 'a, - { - Box::from(self.map(|quad: &'a Quad<'a>| quad.subject).unique()) - } - - fn predicates(self) -> Box> + 'a> - where - Self: Sized + 'a, - { - Box::from(self.map(|quad: &'a Quad<'a>| quad.predicate).unique()) - } - - fn objects(self) -> Box> + 'a> - where - Self: Sized + 'a, - { - 
Box::from(self.map(|quad: &'a Quad<'a>| quad.object).unique()) - } - - fn graph_names(self) -> Box>> + 'a> - where - Self: Sized + 'a, - { - Box::from(self.map(|quad: &'a Quad<'a>| quad.graph_name).unique()) - } -} - -impl<'a, T: ?Sized> QuadIterator<'a> for T where T: Iterator> {} - -pub enum QuadPatternFilterKind { - Match, - Skip, -} - -pub struct QuadPatternFilter<'a, I> -where - I: Iterator>, -{ - patterns: Vec>, - filter_kind: QuadPatternFilterKind, - inner: I, -} - -impl<'a, I> QuadPatternFilter<'a, I> -where - I: Iterator>, -{ - pub fn new( - inner: I, - patterns: Vec>, - filter_kind: QuadPatternFilterKind, - ) -> Self { - Self { - patterns, - inner, - filter_kind, - } - } -} - -impl<'a, I> Iterator for QuadPatternFilter<'a, I> -where - I: Iterator>, -{ - type Item = &'a Quad<'a>; - - fn next(&mut self) -> Option { - self.inner.find(|quad| match self.filter_kind { - QuadPatternFilterKind::Match => self.patterns.iter().all(|p| p.match_pattern(quad)), - QuadPatternFilterKind::Skip => !self.patterns.iter().any(|p| p.match_pattern(quad)), - }) - } -} diff --git a/packages/axone-rdf/src/lib.rs b/packages/axone-rdf/src/lib.rs deleted file mode 100644 index ac1e26da..00000000 --- a/packages/axone-rdf/src/lib.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod dataset; -pub mod normalize; -pub mod owned_model; -pub mod serde; -pub mod uri; diff --git a/packages/axone-rdf/src/normalize.rs b/packages/axone-rdf/src/normalize.rs deleted file mode 100644 index 81f01ea7..00000000 --- a/packages/axone-rdf/src/normalize.rs +++ /dev/null @@ -1,767 +0,0 @@ -use itertools::Itertools; -use rio_api::model::{BlankNode, GraphName, Quad, Subject, Term}; -use sha2; -use sha2::Digest; -use std::collections::hash_map::{Entry, Iter}; -use std::collections::{BTreeMap, HashMap}; -use thiserror::Error; - -/// A RDF normalizer allowing to canonicalize RDF data, following the https://www.w3.org/TR/rdf-canon specification. 
-#[derive(Debug, Eq, PartialEq)] -pub struct Normalizer<'a> { - blank_node_to_quads: HashMap>>, - hash_to_blank_nodes: BTreeMap>, - blank_node_to_hash: HashMap, - canonical_issuer: IdentifierIssuer, -} - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum NormalizationError { - /// An unexpected error denotes an error that should never occur. - #[error("An unexpected error occurred: {0}")] - Unexpected(String), -} - -impl<'a> Normalizer<'a> { - const CANONICAL_BLANK_NODES_IDENTIFIER_PREFIX: &'static str = "c14n"; - const TEMPORARY_BLANK_NODES_IDENTIFIER_PREFIX: &'static str = "b"; - - const HASH_FIRST_DEGREE_MARKER_SELF: &'static str = "a"; - const HASH_FIRST_DEGREE_MARKER_OTHER: &'static str = "z"; - - const HASH_RELATED_BLANK_NODE_POSITION_S: &'static str = "s"; - const HASH_RELATED_BLANK_NODE_POSITION_O: &'static str = "o"; - const HASH_RELATED_BLANK_NODE_POSITION_G: &'static str = "g"; - - pub fn new() -> Self { - Normalizer { - blank_node_to_quads: HashMap::new(), - hash_to_blank_nodes: BTreeMap::new(), - blank_node_to_hash: HashMap::new(), - canonical_issuer: IdentifierIssuer::new( - Self::CANONICAL_BLANK_NODES_IDENTIFIER_PREFIX, - 0u128, - ), - } - } - - pub fn normalize(&mut self, dataset: &[Quad<'a>]) -> Result { - self.reset(); - self.track_blank_nodes(dataset); - self.compute_first_degree_hashes(); - self.label_unique_nodes()?; - self.compute_n_degree_hashes()?; - - let swap_fn = |n| { - self.canonical_issuer.get(n).ok_or_else(|| { - NormalizationError::Unexpected( - "Could not replace blank node, canonical identifier not found".to_string(), - ) - }) - }; - let mut canonicalized_dataset = dataset.to_vec(); - for quad in canonicalized_dataset.iter_mut() { - quad.try_swap_blank_nodes(&swap_fn)?; - } - - Ok(Self::serialize(&canonicalized_dataset)) - } - - fn reset(&mut self) { - self.blank_node_to_quads = HashMap::new(); - self.hash_to_blank_nodes = BTreeMap::new(); - self.blank_node_to_hash = HashMap::new(); - self.canonical_issuer = - 
IdentifierIssuer::new(Self::CANONICAL_BLANK_NODES_IDENTIFIER_PREFIX, 0u128); - } - - fn track_blank_nodes(&mut self, dataset: &[Quad<'a>]) { - for quad in dataset { - for node in quad.blank_nodes() { - self.blank_node_to_quads - .entry(node) - .and_modify(|e| e.push(*quad)) - .or_insert(vec![*quad]); - } - } - } - - fn compute_first_degree_hashes(&mut self) { - for (target, quads) in &self.blank_node_to_quads { - let mut replacements = quads.clone(); - let swap_fn = |n| { - if n == target { - return Self::HASH_FIRST_DEGREE_MARKER_SELF; - } - Self::HASH_FIRST_DEGREE_MARKER_OTHER - }; - replacements.iter_mut().for_each(|quad| { - quad.swap_blank_nodes(&swap_fn); - }); - - let hash = Self::hash(Self::serialize(&replacements)); - self.hash_to_blank_nodes - .entry(hash.clone()) - .and_modify(|v| v.push(target.clone())) - .or_insert(vec![target.clone()]); - self.blank_node_to_hash.insert(target.clone(), hash); - } - } - - fn label_unique_nodes(&mut self) -> Result<(), NormalizationError> { - let unique_nodes = self - .hash_to_blank_nodes - .iter() - .filter(|(_, nodes)| nodes.len() <= 1) - .map(|(hash, nodes)| { - nodes - .first() - .ok_or_else(|| { - NormalizationError::Unexpected( - "Could not label unique node, node not found".to_string(), - ) - }) - .map(|node| (hash.clone(), node.clone())) - }) - .collect::, NormalizationError>>()?; - - for (hash, node) in unique_nodes { - self.hash_to_blank_nodes.remove(&hash); - self.canonical_issuer.get_or_issue(node); - } - - Ok(()) - } - - fn compute_n_degree_hashes(&mut self) -> Result<(), NormalizationError> { - for nodes in self - .hash_to_blank_nodes - .values() - .cloned() - .collect::>() - { - let mut hash_path_list: Vec<(String, IdentifierIssuer)> = - Vec::with_capacity(nodes.len()); - - for node in &nodes { - if self.canonical_issuer.issued(node) { - continue; - } - - let mut scoped_issuer = - IdentifierIssuer::new(Self::TEMPORARY_BLANK_NODES_IDENTIFIER_PREFIX, 0u128); - scoped_issuer.get_or_issue(node.clone()); - - let 
(n_degree_hash, issuer) = - self.compute_n_degree_hash(&mut scoped_issuer, node)?; - hash_path_list.push((n_degree_hash, issuer.clone())); - } - - hash_path_list.sort_by(|left, right| left.0.cmp(&right.0)); - for (_, issuer) in hash_path_list { - for node in issuer.issue_log { - self.canonical_issuer.get_or_issue(node); - } - } - } - - Ok(()) - } - - fn compute_n_degree_hash( - &mut self, - scoped_issuer: &mut IdentifierIssuer, - node: &String, - ) -> Result<(String, IdentifierIssuer), NormalizationError> { - let mut hashes: BTreeMap> = BTreeMap::new(); - - for quad in self.blank_node_to_quads.get(node).ok_or_else(|| { - NormalizationError::Unexpected( - "Could not compute n degree hash, quads for node not found".to_string(), - ) - })? { - for (related, position) in [ - match quad.subject { - Subject::BlankNode(BlankNode { id }) if id != node => { - Some((id, Self::HASH_RELATED_BLANK_NODE_POSITION_S)) - } - _ => None, - }, - match quad.object { - Term::BlankNode(BlankNode { id }) if id != node => { - Some((id, Self::HASH_RELATED_BLANK_NODE_POSITION_O)) - } - _ => None, - }, - match quad.graph_name { - Some(GraphName::BlankNode(BlankNode { id })) if id != node => { - Some((id, Self::HASH_RELATED_BLANK_NODE_POSITION_G)) - } - _ => None, - }, - ] - .iter() - .flatten() - { - let hash = - self.compute_related_blank_node_hash(quad, scoped_issuer, related, position)?; - - hashes - .entry(hash) - .and_modify(|v| v.push(related.to_string())) - .or_insert(vec![related.to_string()]); - } - } - - let mut hasher = sha2::Sha256::new(); - let mut chosen_issuer = - IdentifierIssuer::new(Self::TEMPORARY_BLANK_NODES_IDENTIFIER_PREFIX, 0u128); - let mut chosen_path = String::new(); - - for (hash, related) in hashes { - hasher.update(hash); - - for p in related.as_slice().permutations() { - let mut issuer = scoped_issuer.clone(); - let mut path = String::from("_:"); - let mut recursion_list = Vec::new(); - - for related in p { - if let Some(id) = self.canonical_issuer.get(&related) { 
- path.push_str(id); - } else { - if !issuer.issued(&related) { - recursion_list.push(related.clone()); - } - path.push_str(issuer.get_str_or_issue(related)); - } - } - - if !chosen_path.is_empty() && path.len() >= chosen_path.len() && path > chosen_path - { - continue; - } - - for related in recursion_list { - let (result, mut issuer) = self.compute_n_degree_hash(&mut issuer, &related)?; - path.push_str("_:"); - path.push_str(issuer.get_str_or_issue(related)); - path.push('<'); - path.push_str(&result); - path.push('>'); - - if !chosen_path.is_empty() - && path.len() >= chosen_path.len() - && path > chosen_path - { - continue; - } - } - - if chosen_path.is_empty() || chosen_path > path { - chosen_path = path; - chosen_issuer = issuer; - } - } - - hasher.update(chosen_path.as_str()); - } - - Ok(( - base16ct::lower::encode_string(&hasher.finalize()), - chosen_issuer, - )) - } - - fn compute_related_blank_node_hash( - &self, - quad: &Quad<'_>, - scoped_issuer: &mut IdentifierIssuer, - node: &str, - position: &str, - ) -> Result { - let mut hasher = sha2::Sha256::new(); - hasher.update(position); - if position != Self::HASH_RELATED_BLANK_NODE_POSITION_G { - hasher.update("<"); - hasher.update(quad.predicate.iri); - hasher.update(">"); - } - - hasher.update( - self.canonical_issuer - .get(node) - .or_else(|| scoped_issuer.get(node)) - .map(|s| format!("_:{0}", s)) - .or_else(|| self.blank_node_to_hash.get(node).cloned()) - .ok_or_else(|| { - NormalizationError::Unexpected( - "Could not compute related node hash, node not found".to_string(), - ) - })?, - ); - - Ok(base16ct::lower::encode_string(&hasher.finalize())) - } - - fn hash(data: String) -> String { - let mut hasher = sha2::Sha256::new(); - hasher.update(data); - let hash = hasher.finalize().to_vec(); - - base16ct::lower::encode_string(&hash) - } - - fn serialize(quads: &[Quad<'_>]) -> String { - let mut raw_sorted = BTreeMap::new(); - for quad in quads { - raw_sorted.insert(format!("{} .\n", quad), ()); - } - - 
raw_sorted.keys().join("") - } -} - -impl<'a> Default for Normalizer<'a> { - fn default() -> Self { - Self::new() - } -} - -/// Canonical blank node identifier issuer, specified by: https://www.w3.org/TR/rdf-canon/#issue-identifier. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct IdentifierIssuer { - prefix: String, - pub counter: u128, - issued: HashMap, - issue_log: Vec, -} - -impl IdentifierIssuer { - pub fn new(prefix: &str, counter_offset: u128) -> Self { - Self { - prefix: prefix.to_string(), - counter: counter_offset, - issued: HashMap::new(), - issue_log: Vec::new(), - } - } - - pub fn get_or_issue(&mut self, identifier: String) -> (u128, &str) { - let res = match self.issued.entry(identifier.clone()) { - Entry::Occupied(e) => e.into_mut(), - Entry::Vacant(e) => { - let n = self.counter; - let str = format!("{}{}", self.prefix, n); - self.counter += 1; - - self.issue_log.push(identifier); - e.insert((n, str)) - } - }; - (res.0, res.1.as_str()) - } - - pub fn get_n_or_issue(&mut self, identifier: String) -> u128 { - self.get_or_issue(identifier).0 - } - - pub fn get_str_or_issue(&mut self, identifier: String) -> &str { - self.get_or_issue(identifier).1 - } - - pub fn get(&self, identifier: &str) -> Option<&str> { - self.issued.get(identifier).map(|(_, str)| str.as_str()) - } - - pub fn issued(&self, identifier: &str) -> bool { - self.issued.contains_key(identifier) - } - - pub fn issued_iter(&self) -> Iter<'_, String, (u128, String)> { - self.issued.iter() - } -} - -trait WithBlankNodes<'a> { - fn blank_nodes(&self) -> Vec; - - fn swap_blank_nodes(&mut self, swap_fn: &'a F) - where - F: Fn(&'a str) -> &'a str; - - fn try_swap_blank_nodes(&mut self, swap_fn: &'a F) -> Result<(), E> - where - F: Fn(&'a str) -> Result<&'a str, E>; -} - -impl<'a> WithBlankNodes<'a> for Quad<'a> { - fn blank_nodes(&self) -> Vec { - let mut nodes = Vec::new(); - - if let Subject::BlankNode(n) = self.subject { - nodes.push(n.id.to_string()); - } - if let Term::BlankNode(n) = 
self.object { - nodes.push(n.id.to_string()); - } - if let Some(GraphName::BlankNode(n)) = self.graph_name { - nodes.push(n.id.to_string()); - } - - nodes - } - - fn swap_blank_nodes(&mut self, swap_fn: &'a F) - where - F: Fn(&'a str) -> &'a str, - { - if let Subject::BlankNode(n) = self.subject { - self.subject = Subject::BlankNode(BlankNode { id: swap_fn(n.id) }); - } - if let Term::BlankNode(n) = self.object { - self.object = Term::BlankNode(BlankNode { id: swap_fn(n.id) }); - } - if let Some(GraphName::BlankNode(n)) = self.graph_name { - self.graph_name = Some(GraphName::BlankNode(BlankNode { id: swap_fn(n.id) })); - } - } - - fn try_swap_blank_nodes(&mut self, swap_fn: &'a F) -> Result<(), E> - where - F: Fn(&'a str) -> Result<&'a str, E>, - { - if let Subject::BlankNode(n) = self.subject { - self.subject = Subject::BlankNode(BlankNode { id: swap_fn(n.id)? }); - } - if let Term::BlankNode(n) = self.object { - self.object = Term::BlankNode(BlankNode { id: swap_fn(n.id)? }); - } - if let Some(GraphName::BlankNode(n)) = self.graph_name { - self.graph_name = Some(GraphName::BlankNode(BlankNode { id: swap_fn(n.id)? })); - } - - Ok(()) - } -} - -/// Helper iterator over all the possible permutations of an array. -/// It internally implements the quickperm algorithm: https://www.quickperm.org. 
-struct PermutationsIter { - next: Option>, - a: Vec, - p: Vec, - i: usize, -} - -impl PermutationsIter { - pub fn new(src: &[T]) -> Self { - let mut p = Vec::with_capacity(src.len() + 1); - for i in 0..=src.len() { - p.push(i); - } - - Self { - next: Some(src.to_vec()), - a: src.to_vec(), - p, - i: 1, - } - } - - fn permute(&mut self) -> Option> { - if self.i >= self.a.len() { - None?; - } - - (&mut self.p)[self.i] -= 1; - let j = (self.i % 2) * (&self.p)[self.i]; - - self.a.swap(j, self.i); - self.i = 1; - - while (&self.p)[self.i] == 0 { - (&mut self.p)[self.i] = self.i; - self.i += 1; - } - - Some(self.a.clone()) - } -} - -impl Iterator for PermutationsIter { - type Item = Vec; - - fn next(&mut self) -> Option { - let crt = self.next.clone()?; - self.next = self.permute(); - Some(crt) - } -} - -trait Permutable { - fn permutations(&self) -> PermutationsIter; -} - -impl Permutable for &[T] { - fn permutations(&self) -> PermutationsIter { - PermutationsIter::new(self) - } -} - -#[cfg(test)] -mod test { - use super::*; - use rio_api::model::{Literal, NamedNode}; - - #[test] - fn normalize() { - let cases = vec![ - ( - vec![ - Quad { - subject: Subject::NamedNode(NamedNode { - iri: "http://example.com/#p", - }), - predicate: NamedNode { - iri: "http://example.com/#q", - }, - object: Term::BlankNode(BlankNode { id: "e0" }), - graph_name: None, - }, - Quad { - subject: Subject::NamedNode(NamedNode { - iri: "http://example.com/#p", - }), - predicate: NamedNode { - iri: "http://example.com/#r", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.com/#s", - }, - object: Term::NamedNode(NamedNode { - iri: "http://example.com/#u", - }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.com/#t", - }, - object: Term::NamedNode(NamedNode { - iri: 
"http://example.com/#u", - }), - graph_name: None, - }, - ], - "197dce9a2a3f3c4bb4591910b3762146423c1a4f6901e3789490d1f28fd5e796".to_string(), - ), - ( - vec![ - Quad { - subject: Subject::NamedNode(NamedNode { - iri: "http://example.com/#p", - }), - predicate: NamedNode { - iri: "http://example.com/#q", - }, - object: Term::BlankNode(BlankNode { id: "e0" }), - graph_name: None, - }, - Quad { - subject: Subject::NamedNode(NamedNode { - iri: "http://example.com/#p", - }), - predicate: NamedNode { - iri: "http://example.com/#q", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.com/#p", - }, - object: Term::BlankNode(BlankNode { id: "e2" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.com/#p", - }, - object: Term::BlankNode(BlankNode { id: "e3" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e2" }), - predicate: NamedNode { - iri: "http://example.com/#r", - }, - object: Term::BlankNode(BlankNode { id: "e3" }), - graph_name: None, - }, - ], - "a561b3db619593d5d255343fe8e40411fdc35836e8a995ffc84b4d54ad9cfabc".to_string(), - ), - ( - vec![ - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.com/#p1", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.com/#p2", - }, - object: Term::Literal(Literal::Simple { value: "Foo" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e2" }), - predicate: NamedNode { - iri: "http://example.com/#p1", - }, - object: Term::BlankNode(BlankNode { id: "e3" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: 
"e3" }), - predicate: NamedNode { - iri: "http://example.com/#p2", - }, - object: Term::Literal(Literal::Simple { value: "Foo" }), - graph_name: None, - }, - ], - "f69f0a9035e18f6c3ab7e0a2a98d2594b19fa05ebebe5cb2efdc0f9d756a8136".to_string(), - ), - ( - vec![ - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.org/vocab#next", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.org/vocab#prev", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.org/vocab#next", - }, - object: Term::BlankNode(BlankNode { id: "e0" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.org/vocab#prev", - }, - object: Term::BlankNode(BlankNode { id: "e0" }), - graph_name: None, - }, - ], - "63e7fb42c6e41ed4b4465cacefbdd27c618e6ec088fd331c92aea1bbadb9a2f1".to_string(), - ), - ( - vec![ - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.com/#p1", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.com/#p2", - }, - object: Term::Literal(Literal::Simple { value: "Foo" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.com/#p3", - }, - object: Term::BlankNode(BlankNode { id: "g0" }), - graph_name: None, - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e0" }), - predicate: NamedNode { - iri: "http://example.com/#p1", - }, - object: Term::BlankNode(BlankNode { id: "e1" }), 
- graph_name: Some(GraphName::BlankNode(BlankNode { id: "g0" })), - }, - Quad { - subject: Subject::BlankNode(BlankNode { id: "e1" }), - predicate: NamedNode { - iri: "http://example.com/#p2", - }, - object: Term::Literal(Literal::Simple { value: "Bar" }), - graph_name: Some(GraphName::BlankNode(BlankNode { id: "g0" })), - }, - ], - "94ac982a844fa31a439f98427978be93a1b489988aea0b939cdcc32d6bb4fddc".to_string(), - ), - ]; - - for case in cases { - let mut normalizer = Normalizer::new(); - let res = normalizer.normalize(&case.0); - assert_eq!(res.is_ok(), true); - assert_eq!(Normalizer::hash(res.unwrap()), case.1); - } - } - - #[test] - fn permutations() { - let cases: Vec<(Vec, Vec>)> = vec![ - (vec![], vec![vec![]]), - (vec![1], vec![vec![1]]), - (vec![1, 2], vec![vec![1, 2], vec![2, 1]]), - ( - vec![1, 2, 3], - vec![ - vec![1, 2, 3], - vec![2, 1, 3], - vec![3, 1, 2], - vec![1, 3, 2], - vec![2, 3, 1], - vec![3, 2, 1], - ], - ), - ]; - - for case in cases { - let result: Vec> = case.0.as_slice().permutations().collect(); - assert_eq!(result, case.1); - } - } -} diff --git a/packages/axone-rdf/src/owned_model.rs b/packages/axone-rdf/src/owned_model.rs deleted file mode 100644 index ef5bdc08..00000000 --- a/packages/axone-rdf/src/owned_model.rs +++ /dev/null @@ -1,153 +0,0 @@ -use rio_api::model::{BlankNode, GraphName, Literal, NamedNode, Quad, Subject, Term}; - -#[derive(Debug)] -pub struct RDFStarUnsupported; - -pub struct OwnedQuad { - subject: OwnedSubject, - predicate: String, - object: OwnedTerm, - graph_name: Option, -} - -impl TryFrom> for OwnedQuad { - type Error = RDFStarUnsupported; - - fn try_from(value: Quad<'_>) -> Result { - Ok(Self { - subject: value.subject.try_into()?, - predicate: value.predicate.iri.to_owned(), - object: value.object.try_into()?, - graph_name: value.graph_name.map(OwnedGraphName::from), - }) - } -} - -impl<'a> From<&'a OwnedQuad> for Quad<'a> { - fn from(value: &'a OwnedQuad) -> Self { - Self { - subject: (&value.subject).into(), - 
predicate: NamedNode { - iri: value.predicate.as_str(), - }, - object: (&value.object).into(), - graph_name: value.graph_name.as_ref().map(GraphName::from), - } - } -} - -pub enum Id { - Named(String), - Blank(String), -} - -pub type OwnedSubject = Id; - -impl TryFrom> for OwnedSubject { - type Error = RDFStarUnsupported; - - fn try_from(value: Subject<'_>) -> Result { - Ok(match value { - Subject::NamedNode(n) => Self::Named(n.iri.to_owned()), - Subject::BlankNode(n) => Self::Blank(n.id.to_owned()), - Subject::Triple(_) => Err(RDFStarUnsupported {})?, - }) - } -} - -impl<'a> From<&'a OwnedSubject> for Subject<'a> { - fn from(value: &'a OwnedSubject) -> Self { - match value { - OwnedSubject::Named(iri) => NamedNode { iri }.into(), - OwnedSubject::Blank(id) => BlankNode { id }.into(), - } - } -} - -pub type OwnedGraphName = Id; - -impl From> for OwnedGraphName { - fn from(value: GraphName<'_>) -> Self { - match value { - GraphName::NamedNode(n) => Self::Named(n.iri.to_owned()), - GraphName::BlankNode(n) => Self::Blank(n.id.to_owned()), - } - } -} - -impl<'a> From<&'a OwnedGraphName> for GraphName<'a> { - fn from(value: &'a OwnedGraphName) -> Self { - match value { - OwnedGraphName::Named(iri) => NamedNode { iri }.into(), - OwnedGraphName::Blank(id) => BlankNode { id }.into(), - } - } -} - -pub enum OwnedTerm { - Named(String), - Blank(String), - Literal(OwnedLiteral), -} - -impl TryFrom> for OwnedTerm { - type Error = RDFStarUnsupported; - - fn try_from(value: Term<'_>) -> Result { - Ok(match value { - Term::NamedNode(n) => OwnedTerm::Named(n.iri.to_owned()), - Term::BlankNode(n) => OwnedTerm::Blank(n.id.to_owned()), - Term::Literal(l) => OwnedTerm::Literal(l.into()), - Term::Triple(_) => Err(RDFStarUnsupported)?, - }) - } -} - -impl<'a> From<&'a OwnedTerm> for Term<'a> { - fn from(value: &'a OwnedTerm) -> Self { - match value { - OwnedTerm::Named(iri) => NamedNode { iri }.into(), - OwnedTerm::Blank(id) => BlankNode { id }.into(), - OwnedTerm::Literal(l) => 
Term::Literal(l.into()), - } - } -} - -pub enum OwnedLiteral { - Simple(String), - LanguageTaggedString { value: String, language: String }, - Typed { value: String, datatype: String }, -} - -impl From> for OwnedLiteral { - fn from(value: Literal<'_>) -> Self { - match value { - Literal::Simple { value } => OwnedLiteral::Simple(value.to_owned()), - Literal::LanguageTaggedString { value, language } => { - OwnedLiteral::LanguageTaggedString { - value: value.to_owned(), - language: language.to_owned(), - } - } - Literal::Typed { value, datatype } => OwnedLiteral::Typed { - value: value.to_owned(), - datatype: datatype.iri.to_owned(), - }, - } - } -} - -impl<'a> From<&'a OwnedLiteral> for Literal<'a> { - fn from(l: &'a OwnedLiteral) -> Self { - match l { - OwnedLiteral::Simple(value) => Literal::Simple { value }, - OwnedLiteral::LanguageTaggedString { value, language } => { - Literal::LanguageTaggedString { value, language } - } - OwnedLiteral::Typed { value, datatype } => Literal::Typed { - value, - datatype: NamedNode { iri: datatype }, - }, - } - } -} diff --git a/packages/axone-rdf/src/serde.rs b/packages/axone-rdf/src/serde.rs deleted file mode 100644 index dbd5f9fe..00000000 --- a/packages/axone-rdf/src/serde.rs +++ /dev/null @@ -1,170 +0,0 @@ -use crate::owned_model::OwnedQuad; -use rio_api::formatter::TriplesFormatter; -use rio_api::model::{Quad, Triple}; -use rio_api::parser::{QuadsParser, TriplesParser}; -use rio_turtle::{ - NQuadsFormatter, NQuadsParser, NTriplesFormatter, NTriplesParser, TurtleError, TurtleFormatter, - TurtleParser, -}; -use rio_xml::{RdfXmlError, RdfXmlFormatter, RdfXmlParser}; -use std::io::{self, BufRead}; -use thiserror::Error; - -pub struct TripleReader { - parser: TriplesParserKind, -} - -pub struct NQuadsReader { - parser: NQuadsParser, -} - -#[derive(Debug, Error)] -pub enum NQuadsReadError { - #[error("RDF Star notation not supported")] - RDFStarUnsupported, - - #[error("Couldn't parse rdf: {0}")] - Parse(#[from] TurtleError), -} - 
-pub struct TripleWriter { - writer: TriplesWriterKind, -} - -#[allow(clippy::large_enum_variant)] -pub enum TriplesParserKind { - NTriples(NTriplesParser), - Turtle(TurtleParser), - RdfXml(RdfXmlParser), - NQuads(NQuadsParser), -} - -pub enum TriplesWriterKind { - NTriples(NTriplesFormatter), - Turtle(TurtleFormatter), - RdfXml(io::Result>), - NQuads(NQuadsFormatter), -} - -pub enum DataFormat { - /// Represents a [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) format. - RDFXml, - /// Represents a [Turtle](https://www.w3.org/TR/turtle/) format. - Turtle, - /// Represents a [N-Triples](https://www.w3.org/TR/n-triples/) format. - NTriples, - /// Represents a [N-Quads](https://www.w3.org/TR/n-quads/) format. - NQuads, -} - -impl TripleReader { - pub fn new(format: &DataFormat, src: R) -> Self { - TripleReader { - parser: match format { - DataFormat::RDFXml => TriplesParserKind::RdfXml(RdfXmlParser::new(src, None)), - DataFormat::Turtle => TriplesParserKind::Turtle(TurtleParser::new(src, None)), - DataFormat::NTriples => TriplesParserKind::NTriples(NTriplesParser::new(src)), - DataFormat::NQuads => TriplesParserKind::NQuads(NQuadsParser::new(src)), - }, - } - } - - pub fn read_all(&mut self, mut use_fn: UF) -> Result<(), E> - where - UF: FnMut(Triple<'_>) -> Result<(), E>, - E: From + From, - { - match &mut self.parser { - TriplesParserKind::NTriples(parser) => parser.parse_all(&mut use_fn), - TriplesParserKind::Turtle(parser) => parser.parse_all(&mut use_fn), - TriplesParserKind::RdfXml(parser) => parser.parse_all(&mut use_fn), - TriplesParserKind::NQuads(parser) => { - parser.parse_all(&mut |quad: Quad<'_>| -> Result<(), E> { - use_fn(Triple { - subject: quad.subject, - predicate: quad.predicate, - object: quad.object, - }) - }) - } - } - } -} - -impl NQuadsReader { - pub fn new(src: R) -> Self { - NQuadsReader { - parser: NQuadsParser::new(src), - } - } - - pub fn read_all(&mut self) -> Result, NQuadsReadError> { - let mut quads = vec![]; - - self.parser - 
.parse_all(&mut |quad| -> Result<(), NQuadsReadError> { - quads.push( - quad.try_into() - .map_err(|_| NQuadsReadError::RDFStarUnsupported)?, - ); - Ok(()) - })?; - - Ok(quads) - } -} - -impl TripleWriter { - pub fn new(format: &DataFormat, dst: W) -> Self { - TripleWriter { - writer: match format { - DataFormat::RDFXml => TriplesWriterKind::RdfXml(RdfXmlFormatter::new(dst)), - DataFormat::Turtle => TriplesWriterKind::Turtle(TurtleFormatter::new(dst)), - DataFormat::NTriples => TriplesWriterKind::NTriples(NTriplesFormatter::new(dst)), - DataFormat::NQuads => TriplesWriterKind::NQuads(NQuadsFormatter::new(dst)), - }, - } - } - - pub fn write(&mut self, triple: &Triple<'_>) -> io::Result<()> { - match &mut self.writer { - TriplesWriterKind::Turtle(formatter) => formatter.format(triple), - TriplesWriterKind::NTriples(formatter) => formatter.format(triple), - TriplesWriterKind::NQuads(formatter) => { - use rio_api::formatter::QuadsFormatter; - - let quad = &Quad { - subject: triple.subject, - predicate: triple.predicate, - object: triple.object, - graph_name: None, - }; - - formatter.format(quad) - } - TriplesWriterKind::RdfXml(format_result) => match format_result { - Ok(formatter) => formatter.format(triple), - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e.to_string())), - }, - } - } - - pub fn write_all(&mut self, triples: Vec<&Triple<'_>>) -> io::Result<()> { - for triple in triples { - self.write(triple)?; - } - Ok(()) - } - - pub fn finish(self) -> io::Result { - match self.writer { - TriplesWriterKind::Turtle(formatter) => formatter.finish(), - TriplesWriterKind::NTriples(formatter) => formatter.finish(), - TriplesWriterKind::NQuads(formatter) => formatter.finish(), - TriplesWriterKind::RdfXml(format_result) => match format_result { - Ok(formatter) => formatter.finish(), - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e.to_string())), - }, - } - } -} diff --git a/packages/axone-rdf/src/uri.rs b/packages/axone-rdf/src/uri.rs deleted file mode 100644 
index cb1af8ea..00000000 --- a/packages/axone-rdf/src/uri.rs +++ /dev/null @@ -1,116 +0,0 @@ -use cosmwasm_std::{StdError, StdResult}; -use std::collections::HashMap; - -/// Explode a compacted URI (CURIE - URI with prefix) separating it from its prefix. -pub fn explode_iri(iri: &str) -> StdResult<(String, String)> { - let mut marker_index: Option = None; - for delim in ['#', '/', ':'] { - if let Some(index) = iri.rfind(delim) { - marker_index = match marker_index { - Some(i) => Some(i.max(index)), - None => Some(index), - } - } - } - - if let Some(index) = marker_index { - return Ok((iri[..=index].to_string(), iri[index + 1..].to_string())); - } - - Err(StdError::generic_err("Couldn't extract IRI namespace")) -} - -/// Expand a compacted URI (CURIE - URI with prefix) to a full URI. -pub fn expand_uri(curie: &str, prefixes: &HashMap) -> StdResult { - let idx = curie - .rfind(':') - .ok_or_else(|| StdError::generic_err(format!("Malformed CURIE: {curie}")))?; - - let prefix = curie[..idx].to_string(); - let namespace = prefixes - .get(&prefix) - .ok_or_else(|| StdError::generic_err(format!("Prefix not found: {prefix}")))?; - let suffix = curie[idx + 1..].to_string(); - - Ok(format!("{namespace}{suffix}")) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn proper_explode_iri() { - assert_eq!( - explode_iri("http://www.w3.org/2001/XMLSchema#dateTime"), - Ok(( - "http://www.w3.org/2001/XMLSchema#".to_string(), - "dateTime".to_string() - )) - ); - assert_eq!( - explode_iri("https://ontology.axone.space/core/Governance"), - Ok(( - "https://ontology.axone.space/core/".to_string(), - "Governance".to_string() - )) - ); - assert_eq!( - explode_iri( - "did:key:0x04d1f1b8f8a7a28f9a5a254c326a963a22f5a5b5d5f5e5d5c5b5a5958575655" - ), - Ok(( - "did:key:".to_string(), - "0x04d1f1b8f8a7a28f9a5a254c326a963a22f5a5b5d5f5e5d5c5b5a5958575655".to_string() - )) - ); - assert_eq!( - explode_iri("wow:this/is#weird"), - Ok(("wow:this/is#".to_string(), "weird".to_string())) - ); 
- assert_eq!( - explode_iri("this#is:weird/too"), - Ok(("this#is:weird/".to_string(), "too".to_string())) - ); - assert_eq!( - explode_iri("this_doesn't_work"), - Err(StdError::generic_err("Couldn't extract IRI namespace")) - ); - } - - #[test] - fn test_expand_uri() { - let prefixes = HashMap::from([ - ("ex".to_string(), "http://example.com/".to_string()), - ( - "rdf".to_string(), - "http://www.w3.org/1999/02/22-rdf-syntax-ns#".to_string(), - ), - ]); - - assert_eq!( - expand_uri("ex:resource", &prefixes), - Ok("http://example.com/resource".to_string()) - ); - - assert_eq!( - expand_uri("ex:", &prefixes), - Ok("http://example.com/".to_string()) - ); - - assert_eq!( - expand_uri("unknown:resource", &prefixes), - Err(StdError::generic_err("Prefix not found: unknown")) - ); - - assert_eq!( - expand_uri("malformed_curie:", &prefixes), - Err(StdError::generic_err("Prefix not found: malformed_curie")) - ); - - assert_eq!( - expand_uri("malformed_curie", &prefixes), - Err(StdError::generic_err("Malformed CURIE: malformed_curie")) - ); - } -} diff --git a/packages/axone-wasm/Cargo.toml b/packages/axone-wasm/Cargo.toml deleted file mode 100644 index 4c86e79b..00000000 --- a/packages/axone-wasm/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -authors = { workspace = true } -edition = { workspace = true } -name = "axone-wasm" - -description = "A library that defines CosmWasm URIs, enabling the identification of blockchain resources by referencing specific instantiated smart contracts." 
-homepage = { workspace = true } -license = { workspace = true } -repository = { workspace = true } -version = { workspace = true } - -[dependencies] -form_urlencoded = "1.2.2" -serde.workspace = true -serde-json-wasm.workspace = true -thiserror.workspace = true -url = "2.5.7" diff --git a/packages/axone-wasm/Makefile.toml b/packages/axone-wasm/Makefile.toml deleted file mode 100644 index 2d9b5d19..00000000 --- a/packages/axone-wasm/Makefile.toml +++ /dev/null @@ -1 +0,0 @@ -[tasks.schema] diff --git a/packages/axone-wasm/README.md b/packages/axone-wasm/README.md deleted file mode 100644 index 272648fc..00000000 --- a/packages/axone-wasm/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# WASM - -A library that defines CosmWasm URIs, enabling the identification of blockchain resources by referencing specific -instantiated smart contracts. diff --git a/packages/axone-wasm/src/error.rs b/packages/axone-wasm/src/error.rs deleted file mode 100644 index a6b9b761..00000000 --- a/packages/axone-wasm/src/error.rs +++ /dev/null @@ -1,17 +0,0 @@ -use thiserror::Error; -use url::ParseError; - -#[derive(Debug, Eq, Error, PartialEq)] -pub enum CosmwasmUriError { - #[error("{0}")] - ParseURI(#[from] ParseError), - - #[error("{0}")] - ParseQuery(String), - - #[error("{0}")] - SerializeQuery(String), - - #[error("Malformed URI: {0}")] - Malformed(String), -} diff --git a/packages/axone-wasm/src/lib.rs b/packages/axone-wasm/src/lib.rs deleted file mode 100644 index e533d91d..00000000 --- a/packages/axone-wasm/src/lib.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod error; -pub mod uri; diff --git a/packages/axone-wasm/src/uri.rs b/packages/axone-wasm/src/uri.rs deleted file mode 100644 index e792fef3..00000000 --- a/packages/axone-wasm/src/uri.rs +++ /dev/null @@ -1,281 +0,0 @@ -use crate::error::CosmwasmUriError; -use serde::{de, ser}; -use std::collections::HashMap; -use std::fmt::Display; -use url::Url; - -const COSMWASM_SCHEME: &str = "cosmwasm"; -const COSMWASM_QUERY_PARAM: &str = "query"; - 
-/// A CosmWasm URI identifies a resource on a blockchain by referencing a specific instantiated -/// smart contract. It includes the contract's address and uses query parameters to encode the message -/// intended for the contract. The resource identified by the URI is the response provided by the -/// smart contract following this query. -/// -/// Its general form is as follows: -/// -/// `cosmwasm:{contract_name}:{contract_address}?query={contract_query}` -/// -/// Where: -/// - `{contract_name}`: Only informative, represents the corresponding smart contract name or type (e.g. `axone-objectarium`); -/// - `{contract_address}`: The address of the smart contract to query; -/// - `{contract_query}`: The JSON query to perform on the targeted smart contract, URL encoded; -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct CosmwasmUri { - pub contract_name: Option, - pub contract_address: String, - pub raw_query: String, -} - -impl CosmwasmUri { - pub fn try_new( - contract_name: Option, - contract_address: String, - query: &T, - ) -> Result - where - T: ser::Serialize + ?Sized, - { - serde_json_wasm::to_string(query) - .map_err(|e| CosmwasmUriError::SerializeQuery(e.to_string())) - .map(|raw_query| CosmwasmUri { - contract_name, - contract_address, - raw_query, - }) - } - - pub fn into_query(self) -> Result - where - T: de::DeserializeOwned, - { - serde_json_wasm::from_str(self.raw_query.as_str()) - .map_err(|e| CosmwasmUriError::ParseQuery(e.to_string())) - } - - fn encode_query(self) -> String { - form_urlencoded::Serializer::new(String::new()) - .append_pair(COSMWASM_QUERY_PARAM, self.raw_query.as_str()) - .finish() - } -} - -impl TryFrom for CosmwasmUri { - type Error = CosmwasmUriError; - - fn try_from(value: String) -> Result { - Url::parse(value.as_str()) - .map_err(CosmwasmUriError::ParseURI) - .and_then(|uri: Url| { - if uri.scheme() != COSMWASM_SCHEME { - return Err(CosmwasmUriError::Malformed("wrong scheme".to_string())); - } - - let path = 
uri.path().to_string(); - let mut path_parts = path.split(':').map(String::from).collect::>(); - let (contract_name, contract_address) = - match (path_parts.pop(), path_parts.pop(), path_parts.pop()) { - (Some(address), Some(name), None) if !address.is_empty() => { - Ok((Some(name), address)) - } - (Some(address), None, None) if !address.is_empty() => Ok((None, address)), - _ => Err(CosmwasmUriError::Malformed("wrong path".to_string())), - }?; - - let queries = uri - .query_pairs() - .into_owned() - .collect::>(); - - match queries.get(COSMWASM_QUERY_PARAM) { - Some(raw_query) => Ok(CosmwasmUri { - contract_name, - contract_address, - raw_query: raw_query.clone(), - }), - _ => Err(CosmwasmUriError::Malformed( - "missing 'query' query parameter".to_string(), - )), - } - }) - } -} - -impl Display for CosmwasmUri { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let encoded_query = self.clone().encode_query(); - let str = match self.contract_name.clone() { - Some(name) => [ - COSMWASM_SCHEME, - ":", - name.as_str(), - ":", - self.contract_address.as_str(), - "?", - encoded_query.as_str(), - ] - .join(""), - _ => [ - COSMWASM_SCHEME, - ":", - self.contract_address.as_str(), - "?", - encoded_query.as_str(), - ] - .join(""), - }; - write!(f, "{}", str) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use serde::{Deserialize, Serialize}; - use url::ParseError; - - #[test] - fn proper_string_mappings() { - let cases = vec![ - ( - CosmwasmUri{ - contract_name: Some("name".to_string()), - contract_address: "address".to_string(), - raw_query: "".to_string() - }, - "cosmwasm:name:address?query=".to_string(), - ), - ( - CosmwasmUri{ - contract_name: Some("name".to_string()), - contract_address: "address".to_string(), - raw_query: "{\"object_data\":{\"id\":\"1a88ca1632c7323c0aa594000cda26ed9f48b36351c29c3d1e35e0a0474e862e\"}}".to_string() - }, - 
"cosmwasm:name:address?query=%7B%22object_data%22%3A%7B%22id%22%3A%221a88ca1632c7323c0aa594000cda26ed9f48b36351c29c3d1e35e0a0474e862e%22%7D%7D".to_string(), - ), - ( - CosmwasmUri{ - contract_name: None, - contract_address: "address".to_string(), - raw_query: "\"data\"".to_string() - }, - "cosmwasm:address?query=%22data%22".to_string(), - ), - ]; - - for case in cases { - assert_eq!(case.0.clone().to_string(), case.1); - let res = CosmwasmUri::try_from(case.1); - assert!(res.is_ok()); - assert_eq!(res.unwrap(), case.0); - } - } - - #[test] - fn parse_uri_error() { - let cases = vec![ - ( - "cosmwasm".to_string(), - CosmwasmUriError::ParseURI(ParseError::RelativeUrlWithoutBase), - ), - ( - "cw:name:address?query=".to_string(), - CosmwasmUriError::Malformed("wrong scheme".to_string()), - ), - ( - "cw:address?query=".to_string(), - CosmwasmUriError::Malformed("wrong scheme".to_string()), - ), - ( - "cosmwasm:too_much:name:address?query=".to_string(), - CosmwasmUriError::Malformed("wrong path".to_string()), - ), - ( - "cosmwasm:?query=".to_string(), - CosmwasmUriError::Malformed("wrong path".to_string()), - ), - ( - "cosmwasm:name:address?".to_string(), - CosmwasmUriError::Malformed("missing 'query' query parameter".to_string()), - ), - ( - "cosmwasm:name:address".to_string(), - CosmwasmUriError::Malformed("missing 'query' query parameter".to_string()), - ), - ]; - - for case in cases { - let res = CosmwasmUri::try_from(case.0); - assert!(res.is_err()); - assert_eq!(res.err().unwrap(), case.1); - } - } - - #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] - struct TestQuery { - pub content: String, - } - - #[test] - fn try_new() { - let cases = vec![ - ( - Some("name".to_string()), - "address".to_string(), - TestQuery { - content: "content".to_string(), - }, - "{\"content\":\"content\"}", - ), - ( - None, - "address".to_string(), - TestQuery { - content: "content".to_string(), - }, - "{\"content\":\"content\"}", - ), - ]; - - for case in cases { - let res 
= CosmwasmUri::try_new(case.0.clone(), case.1.clone(), &case.2); - - assert!(res.is_ok()); - let uri = res.unwrap(); - assert_eq!(uri.contract_name, case.0); - assert_eq!(uri.contract_address, case.1); - assert_eq!(uri.raw_query, case.3); - } - } - - #[test] - fn into_query() { - let cases = vec![ - ( - CosmwasmUri { - contract_name: None, - contract_address: "address".to_string(), - raw_query: "{\"content\":\"content\"}".to_string(), - }, - Ok(TestQuery { - content: "content".to_string(), - }), - ), - ( - CosmwasmUri { - contract_name: None, - contract_address: "address".to_string(), - raw_query: "".to_string(), - }, - Err(CosmwasmUriError::ParseQuery( - "EOF while parsing a JSON value.".to_string(), - )), - ), - ]; - - for case in cases { - let res = case.0.into_query::(); - assert_eq!(res, case.1); - } - } -} diff --git a/packages/testing/Cargo.toml b/packages/testing/Cargo.toml deleted file mode 100644 index 77c0bcc4..00000000 --- a/packages/testing/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -authors = ["AXONE"] -edition = "2021" -name = "testing" -publish = false -version = "5.0.0" - -[dependencies] -cosmwasm-std.workspace = true diff --git a/packages/testing/Makefile.toml b/packages/testing/Makefile.toml deleted file mode 100644 index 2d9b5d19..00000000 --- a/packages/testing/Makefile.toml +++ /dev/null @@ -1 +0,0 @@ -[tasks.schema] diff --git a/packages/testing/src/addr.rs b/packages/testing/src/addr.rs deleted file mode 100644 index f3913aa4..00000000 --- a/packages/testing/src/addr.rs +++ /dev/null @@ -1,10 +0,0 @@ -use cosmwasm_std::testing::MockApi; -use cosmwasm_std::Addr; - -pub const CREATOR: &str = "creator"; -pub const SENDER: &str = "sender"; -pub const OWNER: &str = "owner"; - -pub fn addr(input: &str) -> Addr { - MockApi::default().addr_make(input) -} diff --git a/packages/testing/src/lib.rs b/packages/testing/src/lib.rs deleted file mode 100644 index a5dfc51f..00000000 --- a/packages/testing/src/lib.rs +++ /dev/null @@ -1,4 +0,0 @@ 
-#![cfg(not(target_arch = "wasm32"))] - -pub mod addr; -pub mod mock; diff --git a/packages/testing/src/mock.rs b/packages/testing/src/mock.rs deleted file mode 100644 index e70ed2b0..00000000 --- a/packages/testing/src/mock.rs +++ /dev/null @@ -1,9 +0,0 @@ -use crate::addr::addr; -use cosmwasm_std::testing::{mock_env, MOCK_CONTRACT_ADDR}; -use cosmwasm_std::Env; - -pub fn mock_env_addr() -> Env { - let mut env = mock_env(); - env.contract.address = addr(MOCK_CONTRACT_ADDR); - env -}