diff --git a/Cargo.lock b/Cargo.lock index 00149d55..8479c2a9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -30,9 +30,15 @@ dependencies = [ [[package]] name = "aho-corasick" +<<<<<<< HEAD version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" +======= +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c378d78423fdad8089616f827526ee33c19f2fddbd5de1629152c9593ba4783" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ "memchr 2.6.3", ] @@ -119,17 +125,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" -[[package]] -name = "async-trait" -version = "0.1.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" -dependencies = [ - "proc-macro2 1.0.66", - "quote 1.0.33", - "syn 2.0.29", -] - [[package]] name = "atty" version = "0.2.14" @@ -183,12 +178,9 @@ dependencies = [ [[package]] name = "base64" -version = "0.10.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" -dependencies = [ - "byteorder 1.4.3", -] +checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" [[package]] name = "base64" @@ -198,9 +190,15 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" +<<<<<<< HEAD version = "0.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" +======= +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "414dcefbc63d77c526a76b3afcf6fbb9b5e2791c19c3aa2297733208750c6e53" +>>>>>>> fc1a273 (update node apis future and hyper) [[package]] name = "base64ct" @@ -366,7 +364,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" dependencies = [ "byteorder 1.4.3", - "either", "iovec", ] @@ -374,6 +371,15 @@ dependencies = [ name = "bytes" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" +<<<<<<< HEAD +======= +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +>>>>>>> fc1a273 (update node apis future and hyper) checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] @@ -409,9 +415,15 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" +<<<<<<< HEAD version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +======= +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defd4e7873dbddba6c7c91e199c7fcb946abc4a6a4ac3195400bcfb01b5de877" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ "android-tzdata", "iana-time-zone", @@ -484,11 +496,11 @@ checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" [[package]] name = "core-foundation" -version = "0.6.4" +version = "0.7.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" +checksum = "57d24c7a13c43e870e37c1556b74555437870a04514f7685f5b354e090567171" dependencies = [ - "core-foundation-sys 0.6.2", + "core-foundation-sys 0.7.0", "libc", ] @@ -504,9 +516,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.6.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" +checksum = "b3a71ab494c0b5b860bdc8407ae08978052417070c2ced38573a9157ad75b8ac" [[package]] name = "core-foundation-sys" @@ -935,6 +947,7 @@ dependencies = [ "epic_util", "failure", "failure_derive", + "futures 0.3.28", "humansize", "log", "pancurses", @@ -957,6 +970,7 @@ name = "epic_api" version = "3.4.0" dependencies = [ "bigint", + "bytes 0.5.6", "easy-jsonrpc-mw", "epic_chain", "epic_core", @@ -964,12 +978,10 @@ dependencies = [ "epic_pool", "epic_store", "epic_util", - "failure", - "failure_derive", - "futures 0.1.31", - "http 0.1.21", - "hyper 0.12.36", - "hyper-rustls 0.17.1", + "futures 0.3.28", + "http", + "hyper 0.13.10", + "hyper-rustls", "hyper-timeout", "lazy_static", "log", @@ -979,11 +991,10 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "tokio 0.1.22", - "tokio-core", - "tokio-rustls 0.10.3", - "tokio-tcp", - "url 1.7.2", + "thiserror", + "tokio 0.2.25", + "tokio-rustls", + "url 2.4.1", ] [[package]] @@ -1129,7 +1140,6 @@ dependencies = [ name = "epic_servers" version = "3.4.0" dependencies = [ - "async-trait", "bigint", "chrono", "clokwerk", @@ -1143,9 +1153,9 @@ dependencies = [ "epic_util", "fs2", "futures 0.3.28", - "http 0.1.21", + "http", "hyper 0.13.10", - "hyper-rustls 0.19.1", + "hyper-rustls", "itertools", "lmdb-zero", "log", @@ -1391,16 +1401,6 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" -[[package]] -name = "futures-cpupool" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" -dependencies = [ - "futures 0.1.31", - "num_cpus", -] - [[package]] name = "futures-executor" version = "0.3.28" @@ -1426,7 +1426,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.31", ] [[package]] @@ -1453,8 +1453,8 @@ dependencies = [ "futures-macro", "futures-sink", "futures-task", - "memchr 2.5.0", - "pin-project-lite 0.2.12", + "memchr 2.6.3", + "pin-project-lite 0.2.13", "pin-utils", "slab", ] @@ -1583,6 +1583,7 @@ dependencies = [ [[package]] name = "h2" +<<<<<<< HEAD version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462" @@ -1602,15 +1603,41 @@ dependencies = [ [[package]] name = "h2" version = "0.3.21" +======= +version = "0.2.7" +>>>>>>> fc1a273 (update node apis future and hyper) source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ +<<<<<<< HEAD +======= + "bytes 0.5.6", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio 0.2.25", + "tokio-util 0.3.1", + "tracing", + "tracing-futures", +] + 
+[[package]] +name = "h2" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +dependencies = [ +>>>>>>> fc1a273 (update node apis future and hyper) "bytes 1.5.0", "fnv", "futures-core", "futures-sink", "futures-util", - "http 0.2.9", + "http", "indexmap", "slab", "tokio 1.32.0", @@ -1675,17 +1702,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "http" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ccf5ede3a895d8856620237b2f02972c1bbc78d2965ad7fe8838d4a0ed41f0" -dependencies = [ - "bytes 0.4.12", - "fnv", - "itoa 0.4.8", -] - [[package]] name = "http" version = "0.2.9" @@ -1697,18 +1713,6 @@ dependencies = [ "itoa 1.0.9", ] -[[package]] -name = "http-body" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" -dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "http 0.1.21", - "tokio-buf", -] - [[package]] name = "http-body" version = "0.3.1" @@ -1716,7 +1720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" dependencies = [ "bytes 0.5.6", - "http 0.2.9", + "http", ] [[package]] @@ -1726,8 +1730,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes 1.5.0", +<<<<<<< HEAD "http 0.2.9", "pin-project-lite 0.2.12", +======= + "http", + "pin-project-lite 0.2.13", +>>>>>>> fc1a273 (update node apis future and hyper) ] [[package]] @@ -1759,6 +1768,7 @@ dependencies = [ [[package]] name = "hyper" +<<<<<<< HEAD version = "0.12.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c843caf6296fc1f93444735205af9ed4e109a539005abb2564ae1d6fad34c52" @@ -1785,6 +1795,28 @@ dependencies = [ "tokio-threadpool", "tokio-timer", "want 0.2.0", +======= +version = "0.13.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a6f157065790a3ed2f88679250419b5cdd96e714a0d65f7797fd337186e96bb" +dependencies = [ + "bytes 0.5.6", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.2.7", + "http", + "http-body 0.3.1", + "httparse", + "httpdate 0.3.2", + "itoa 0.4.8", + "pin-project", + "socket2 0.3.19", + "tokio 0.2.25", + "tower-service", + "tracing", + "want", +>>>>>>> fc1a273 (update node apis future and hyper) ] [[package]] @@ -1798,66 +1830,55 @@ dependencies = [ "futures-core", "futures-util", "h2 0.3.21", +<<<<<<< HEAD "http 0.2.9", +======= + "http", +>>>>>>> fc1a273 (update node apis future and hyper) "http-body 0.4.5", "httparse", "httpdate", "itoa 1.0.9", +<<<<<<< HEAD "pin-project-lite", +======= + "pin-project-lite 0.2.13", +>>>>>>> fc1a273 (update node apis future and hyper) "socket2 0.4.9", "tokio 1.32.0", "tower-service", "tracing", - "want 0.3.1", + "want", ] [[package]] name = "hyper-rustls" -version = "0.17.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719d85c7df4a7f309a77d145340a063ea929dcb2e025bae46a80345cffec2952" -dependencies = [ - "bytes 0.4.12", - "ct-logs", - "futures 0.1.31", - "hyper 0.12.36", - "rustls", - "tokio-io", - "tokio-rustls 0.10.3", - "webpki", - "webpki-roots", -] - -[[package]] -name = "hyper-rustls" -version = "0.19.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ea6215c7314d450ee45970ab8b3851ab447a0e6bafdd19e31b20a42dbb7faf" +checksum = "ac965ea399ec3a25ac7d13b8affd4b8f39325cca00858ddf5eb29b79e6b14b08" dependencies = [ "bytes 0.5.6", "ct-logs", "futures-util", "hyper 0.13.10", + "log", "rustls", "rustls-native-certs", "tokio 0.2.25", - "tokio-rustls 0.12.3", + "tokio-rustls", "webpki", ] [[package]] name = "hyper-timeout" -version = "0.2.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16ec7c8e526ed674ce148323e1f2010f76f546fcdca99a2c721e04bc7bf5b6f7" +checksum = "0d1f9b0b8258e3ef8f45928021d3ef14096c2b93b99e4b8cfcabf1f58ec84b0a" dependencies = [ - "futures 0.1.31", - "hyper 0.12.36", - "tokio 0.1.22", - "tokio-io", + "bytes 0.5.6", + "hyper 0.13.10", + "tokio 0.2.25", "tokio-io-timeout", - "tokio-reactor", - "tokio-service", ] [[package]] @@ -2655,7 +2676,11 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.67", "quote 1.0.33", +<<<<<<< HEAD "syn 2.0.37", +======= + "syn 2.0.31", +>>>>>>> fc1a273 (update node apis future and hyper) ] [[package]] @@ -2868,10 +2893,42 @@ dependencies = [ "syn 0.11.11", ] +[[package]] +<<<<<<< HEAD +======= +name = "pin-project" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +dependencies = [ + "proc-macro2 1.0.66", + "quote 1.0.33", + "syn 2.0.31", +] + +[[package]] +>>>>>>> fc1a273 (update node apis future and hyper) +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +<<<<<<< HEAD +======= +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + [[package]] name = "pin-project-lite" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" +>>>>>>> fc1a273 (update node apis future and hyper) checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] @@ -3271,13 +3328,21 @@ version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ +<<<<<<< HEAD "base64 0.21.4", +======= + "base64 0.21.3", +>>>>>>> fc1a273 (update node apis future and hyper) "bytes 1.5.0", "encoding_rs", "futures-core", "futures-util", "h2 0.3.21", +<<<<<<< HEAD "http 0.2.9", +======= + "http", +>>>>>>> fc1a273 (update node apis future and hyper) "http-body 0.4.5", "hyper 0.14.27", "hyper-tls", @@ -3288,7 +3353,11 @@ dependencies = [ "native-tls", "once_cell", "percent-encoding 2.3.0", +<<<<<<< HEAD "pin-project-lite", +======= + "pin-project-lite 0.2.13", +>>>>>>> fc1a273 (update node apis future and hyper) "serde", "serde_json", "serde_urlencoded", @@ -3379,9 +3448,15 @@ dependencies = [ [[package]] name = "rustix" +<<<<<<< HEAD version = "0.38.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "747c788e9ce8e92b12cd485c49ddf90723550b654b32508f979b71a7b1ecda4f" +======= +version = "0.38.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c0c3dde1fc030af041adc40e79c0e7fbcf431dd24870053d187d7c66e4b87453" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ "bitflags 2.4.0", "errno", @@ -3392,11 +3467,11 @@ dependencies = [ [[package]] name = "rustls" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b25a18b1bf7387f0145e7f8324e700805aade3842dd3db2e74e4cdeb4677c09e" +checksum = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1" dependencies = [ - "base64 0.10.1", + "base64 0.11.0", "log", "ring", "sct", @@ -3404,6 +3479,21 @@ dependencies = [ ] [[package]] +<<<<<<< HEAD +======= +name = "rustls-native-certs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75ffeb84a6bd9d014713119542ce415db3a3e4748f0bfce1e1416cd224a23a5" +dependencies = [ + "openssl-probe", + "rustls", + "schannel", + "security-framework 0.4.4", +] + +[[package]] +>>>>>>> fc1a273 (update node apis future and hyper) name = "ryu" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -3433,12 +3523,6 @@ dependencies = [ "windows-sys", ] -[[package]] -name = "scoped-tls" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "332ffa32bf586782a3efaeb58f127980944bbc8c4d6913a86107ac2a5ab24b28" - [[package]] name = "scopeguard" version = "0.3.3" @@ -3463,6 +3547,22 @@ dependencies = [ [[package]] name = "security-framework" +<<<<<<< HEAD +======= +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64808902d7d99f78eaddd2b4e2509713babc3dc3c85ad6f4c447680f3c01e535" +dependencies = [ + "bitflags 1.3.2", + "core-foundation 0.7.0", + "core-foundation-sys 0.7.0", + "libc", + "security-framework-sys 0.4.3", +] + +[[package]] +name = "security-framework" +>>>>>>> fc1a273 (update node apis future and hyper) version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" @@ -3476,11 +3576,18 @@ dependencies = [ [[package]] name = "security-framework-sys" +<<<<<<< HEAD version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +======= +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17bf11d99252f512695eb468de5516e5cf75455521e69dfe343f3b74e4748405" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ - "core-foundation-sys 0.6.2", + "core-foundation-sys 0.7.0", + "libc", ] [[package]] @@ -3535,7 +3642,11 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2 1.0.67", "quote 1.0.33", +<<<<<<< HEAD "syn 2.0.37", +======= + "syn 2.0.31", +>>>>>>> fc1a273 (update node apis future and hyper) ] [[package]] @@ -3700,6 +3811,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c19be23126415861cb3a23e501d34a708f7f9b2183c5252d690941c2e69199d5" [[package]] +<<<<<<< HEAD name = "string" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -3709,6 +3821,8 @@ dependencies = [ ] [[package]] +======= +>>>>>>> fc1a273 (update node apis future and hyper) name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -3767,9 +3881,15 @@ dependencies = [ [[package]] name = "syn" +<<<<<<< HEAD version = "2.0.37" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" +======= +version = "2.0.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "718fa2415bcb8d8bd775917a1bf12a7931b6dfa890753378538118181e0cb398" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ "proc-macro2 1.0.67", "quote 1.0.33", @@ -3870,22 +3990,38 @@ dependencies = [ [[package]] name = "thiserror" +<<<<<<< HEAD version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" +======= +version = "1.0.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" +<<<<<<< HEAD version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" +======= +version = "1.0.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" +>>>>>>> fc1a273 (update node apis future and hyper) dependencies = [ "proc-macro2 1.0.67", "quote 1.0.33", +<<<<<<< HEAD "syn 2.0.37", +======= + "syn 2.0.31", +>>>>>>> fc1a273 (update node apis future and hyper) ] [[package]] @@ -3909,17 +4045,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi 0.3.9", -] - [[package]] name = "tiny-keccak" version = "1.5.0" @@ -3970,6 +4095,33 @@ dependencies = [ [[package]] name = "tokio" +<<<<<<< HEAD +======= +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" +dependencies = [ + "bytes 0.5.6", + "fnv", + "futures-core", + "iovec", + "lazy_static", + "libc", + "memchr 2.6.3", + "mio 0.6.23", + "mio-named-pipes", + "mio-uds", + "num_cpus", + "pin-project-lite 0.1.12", + "signal-hook-registry", + "slab", + "tokio-macros", + "winapi 0.3.9", +] + +[[package]] +name = "tokio" +>>>>>>> fc1a273 (update node apis future and hyper) version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" @@ -3978,22 +4130,16 @@ dependencies = [ "bytes 1.5.0", "libc", "mio 0.8.8", +<<<<<<< HEAD "pin-project-lite", "socket2 0.5.4", +======= + "pin-project-lite 0.2.13", + "socket2 0.5.3", +>>>>>>> fc1a273 (update node apis future and hyper) "windows-sys", ] -[[package]] -name = "tokio-buf" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" -dependencies = [ - "bytes 0.4.12", - "either", - "futures 0.1.31", -] - [[package]] name = "tokio-codec" version = "0.1.2" @@ -4005,25 +4151,6 @@ dependencies = [ "tokio-io", ] -[[package]] -name = "tokio-core" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87b1395334443abca552f63d4f61d0486f12377c2ba8b368e523f89e828cffd4" 
-dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "iovec", - "log", - "mio 0.6.23", - "scoped-tls", - "tokio 0.1.22", - "tokio-executor", - "tokio-io", - "tokio-reactor", - "tokio-timer", -] - [[package]] name = "tokio-current-thread" version = "0.1.7" @@ -4068,14 +4195,12 @@ dependencies = [ [[package]] name = "tokio-io-timeout" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135ce81f15cfd7982fac684f9057a1299eebeb79e98a8a709969b9aa51123129" +checksum = "9390a43272c8a6ac912ed1d1e2b6abeafd5047e05530a2fa304deee041a06215" dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "tokio-io", - "tokio-timer", + "bytes 0.5.6", + "tokio 0.2.25", ] [[package]] @@ -4120,23 +4245,9 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d7cf08f990090abd6c6a73cab46fed62f85e8aef8b99e4b918a9f4a637f0676" -dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "iovec", - "rustls", - "tokio-io", - "webpki", -] - -[[package]] -name = "tokio-rustls" -version = "0.12.3" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3068d891551949b37681724d6b73666787cc63fa8e255c812a41d2513aff9775" +checksum = "15cb62a0d2770787abc96e99c1cd98fcf17f94959f3af63ca85bdfb203f051b4" dependencies = [ "futures-core", "rustls", @@ -4144,15 +4255,6 @@ dependencies = [ "webpki", ] -[[package]] -name = "tokio-service" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24da22d077e0f15f55162bdbdc661228c1581892f52074fb242678d015b45162" -dependencies = [ - "futures 0.1.31", -] - [[package]] name = "tokio-sync" version = "0.1.8" @@ -4248,7 +4350,39 @@ dependencies = [ "bytes 1.5.0", "futures-core", "futures-sink", +<<<<<<< HEAD "pin-project-lite", +======= + "log", + "pin-project-lite 0.1.12", + "tokio 0.2.25", +] + +[[package]] +name = "tokio-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499" +dependencies = [ + "bytes 0.5.6", + "futures-core", + "futures-sink", + "log", + "pin-project-lite 0.1.12", + "tokio 0.2.25", +] + +[[package]] +name = "tokio-util" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +dependencies = [ + "bytes 1.5.0", + "futures-core", + "futures-sink", + "pin-project-lite 0.2.13", +>>>>>>> fc1a273 (update node apis future and hyper) "tokio 1.32.0", "tracing", ] @@ -4285,7 +4419,7 @@ checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ "cfg-if 1.0.0", "log", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.13", "tracing-core", ] @@ -4518,17 +4652,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "want" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" -dependencies = [ - "futures 0.1.31", - "log", - "try-lock", -] - [[package]] name = "want" version = "0.3.1" @@ -4544,12 +4667,6 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -4577,7 +4694,11 @@ dependencies = [ "once_cell", "proc-macro2 1.0.67", "quote 1.0.33", +<<<<<<< HEAD "syn 2.0.37", +======= + "syn 2.0.31", +>>>>>>> fc1a273 (update node apis future and hyper) "wasm-bindgen-shared", ] @@ -4611,7 +4732,11 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2 1.0.67", "quote 1.0.33", +<<<<<<< HEAD "syn 2.0.37", +======= + "syn 2.0.31", +>>>>>>> fc1a273 (update node apis future and hyper) "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4675,15 +4800,6 @@ dependencies = [ "untrusted", ] -[[package]] -name = "webpki-roots" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a262ae37dd9d60f60dd473d1158f9fbebf110ba7b6a5051c8160460f6043718b" -dependencies = [ - "webpki", -] - [[package]] name = "which" version = "1.0.5" @@ -4902,7 +5018,11 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.67", "quote 1.0.33", +<<<<<<< HEAD "syn 2.0.37", +======= + "syn 2.0.31", +>>>>>>> fc1a273 (update node apis future and hyper) ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 70182b62..2f800b42 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,6 +39,7 @@ ctrlc = { version = "3.1", features = ["termination"] } cursive_table_view = "0.13.2" humansize = "1.1.0" serde = "1" +futures = "0.3.19" serde_json = "1" log = "0.4" term = "0.5" diff --git a/api/Cargo.toml b/api/Cargo.toml index 742a6432..37c4100a 100755 --- a/api/Cargo.toml +++ b/api/Cargo.toml @@ -10,32 +10,34 @@ workspace = ".." 
edition = "2018" [dependencies] -easy-jsonrpc-mw = "0.5.3" -failure = "0.1.1" -failure_derive = "0.1.1" -hyper = "0.12" +easy-jsonrpc-mw = "0.5.4" + +hyper = { version = "0.13"} lazy_static = "1" regex = "1" - +ring = "^0.16.20" serde = "1" serde_derive = "1" serde_json = "1" +thiserror = "1" log = "0.4" -tokio = "0.1" -tokio-core = "0.1.17" -tokio-tcp = "0.1" +tokio = { version = "0.2", features = ["full"] } +tokio-rustls = "0.13" -http = "0.1.5" -hyper-timeout = "0.2" -futures = "0.1.21" +http = "0.2" +hyper-timeout = "0.3" +futures = "0.3" -url = "1.7.0" +url = "2.1" bigint = "4.4.1" -ring = "^0.16.20" -hyper-rustls = "0.17.1" -rustls = "0.16.0" -tokio-rustls = "^0.10" + +hyper-rustls = "0.20" +rustls = "0.17" + +bytes = "0.5" + + epic_core = { path = "../core", version = "3.4.0" } epic_chain = { path = "../chain", version = "3.4.0" } diff --git a/api/src/auth.rs b/api/src/auth.rs index c77a1513..625421e9 100644 --- a/api/src/auth.rs +++ b/api/src/auth.rs @@ -14,6 +14,7 @@ use crate::router::{Handler, HandlerObj, ResponseFuture}; use crate::web::response; + use futures::future::ok; use hyper::header::{HeaderValue, AUTHORIZATION, WWW_AUTHENTICATE}; use hyper::{Body, Request, Response, StatusCode}; @@ -128,7 +129,7 @@ impl Handler for BasicAuthURIMiddleware { unauthorized_response(&self.basic_realm) } } else { - return next_handler.call(req, handlers); + next_handler.call(req, handlers) } } } @@ -139,5 +140,5 @@ fn unauthorized_response(basic_realm: &HeaderValue) -> ResponseFuture { .header(WWW_AUTHENTICATE, basic_realm) .body(Body::empty()) .unwrap(); - Box::new(ok(response)) + Box::pin(ok(response)) } diff --git a/api/src/client.rs b/api/src/client.rs index 2f5361ad..4b44bef6 100644 --- a/api/src/client.rs +++ b/api/src/client.rs @@ -14,52 +14,80 @@ //! High level JSON/HTTP client API -use crate::rest::{Error, ErrorKind}; +use crate::rest::Error; use crate::util::to_base64; -use failure::{Fail, ResultExt}; -use futures::future::{err, ok, Either}; -use http::uri::{InvalidUri, Uri}; +use hyper::body; use hyper::header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE, USER_AGENT}; -use hyper::rt::{Future, Stream}; +use hyper::http::uri::{InvalidUri, Uri}; use hyper::{Body, Client, Request}; use hyper_rustls; use hyper_timeout::TimeoutConnector; use serde::{Deserialize, Serialize}; use serde_json; use std::time::Duration; -use tokio::runtime::Runtime; +use tokio::runtime::Builder; -pub type ClientResponseFuture = Box + Send>; +// Client Request Timeout +pub struct TimeOut { + pub connect: Duration, + pub read: Duration, + pub write: Duration, +} + +impl TimeOut { + pub fn new(connect: u64, read: u64, write: u64) -> Self { + Self { + connect: Duration::from_secs(connect), + read: Duration::from_secs(read), + write: Duration::from_secs(write), + } + } +} + +impl Default for TimeOut { + fn default() -> TimeOut { + TimeOut { + connect: Duration::from_secs(20), + read: Duration::from_secs(20), + write: Duration::from_secs(20), + } + } +} /// Helper function to easily issue a HTTP GET request against a given URL that /// returns a JSON object. Handles request building, JSON deserialization and /// response code checking. -pub fn get<'a, T>(url: &'a str, api_secret: Option) -> Result +/// This function spawns a new Tokio runtime, which means it is pretty inefficient for multiple +/// requests. 
In those situations you are probably better off creating a runtime once and spawning +/// `get_async` tasks on it +pub fn get(url: &str, api_secret: Option) -> Result where for<'de> T: Deserialize<'de>, { - handle_request(build_request(url, "GET", api_secret, None)?) + handle_request( + build_request(url, "GET", api_secret, None)?, + TimeOut::default(), + ) } /// Helper function to easily issue an async HTTP GET request against a given /// URL that returns a future. Handles request building, JSON deserialization /// and response code checking. -pub fn get_async<'a, T>(url: &'a str, api_secret: Option) -> ClientResponseFuture +pub async fn get_async(url: &str, api_secret: Option) -> Result where for<'de> T: Deserialize<'de> + Send + 'static, { - match build_request(url, "GET", api_secret, None) { - Ok(req) => Box::new(handle_request_async(req)), - Err(e) => Box::new(err(e)), - } + handle_request_async(build_request(url, "GET", api_secret, None)?).await } /// Helper function to easily issue a HTTP GET request /// on a given URL that returns nothing. Handles request /// building and response code checking. pub fn get_no_ret(url: &str, api_secret: Option) -> Result<(), Error> { - let req = build_request(url, "GET", api_secret, None)?; - send_request(req)?; + send_request( + build_request(url, "GET", api_secret, None)?, + TimeOut::default(), + )?; Ok(()) } @@ -67,33 +95,35 @@ pub fn get_no_ret(url: &str, api_secret: Option) -> Result<(), Error> { /// object as body on a given URL that returns a JSON object. Handles request /// building, JSON serialization and deserialization, and response code /// checking. -pub fn post(url: &str, api_secret: Option, input: &IN) -> Result +pub fn post( + url: &str, + api_secret: Option, + input: &IN, + timeout: TimeOut, +) -> Result where IN: Serialize, for<'de> OUT: Deserialize<'de>, { let req = create_post_request(url, api_secret, input)?; - handle_request(req) + handle_request(req, timeout) } /// Helper function to easily issue an async HTTP POST request with the /// provided JSON object as body on a given URL that returns a future. Handles /// request building, JSON serialization and deserialization, and response code /// checking. -pub fn post_async( +pub async fn post_async( url: &str, input: &IN, api_secret: Option, -) -> ClientResponseFuture +) -> Result where IN: Serialize, OUT: Send + 'static, for<'de> OUT: Deserialize<'de>, { - match create_post_request(url, api_secret, input) { - Ok(req) => Box::new(handle_request_async(req)), - Err(e) => Box::new(err(e)), - } + handle_request_async(create_post_request(url, api_secret, input)?).await } /// Helper function to easily issue a HTTP POST request with the provided JSON @@ -104,8 +134,10 @@ pub fn post_no_ret(url: &str, api_secret: Option, input: &IN) -> Res where IN: Serialize, { - let req = create_post_request(url, api_secret, input)?; - send_request(req)?; + send_request( + create_post_request(url, api_secret, input)?, + TimeOut::default(), + )?; Ok(()) } @@ -113,18 +145,20 @@ where /// provided JSON object as body on a given URL that returns a future. Handles /// request building, JSON serialization and deserialization, and response code /// checking. 
-pub fn post_no_ret_async( +pub async fn post_no_ret_async( url: &str, api_secret: Option, input: &IN, -) -> ClientResponseFuture<()> +) -> Result<(), Error> where IN: Serialize, { - match create_post_request(url, api_secret, input) { - Ok(req) => Box::new(send_request_async(req).and_then(|_| ok(()))), - Err(e) => Box::new(err(e)), - } + send_request_async( + create_post_request(url, api_secret, input)?, + TimeOut::default(), + ) + .await?; + Ok(()) } fn build_request( @@ -133,29 +167,33 @@ fn build_request( api_secret: Option, body: Option, ) -> Result, Error> { - let uri = url.parse::().map_err::(|e: InvalidUri| { - e.context(ErrorKind::Argument(format!("Invalid url {}", url))) - .into() - })?; - let mut builder = Request::builder(); - if let Some(api_secret) = api_secret { - let basic_auth = format!("Basic {}", to_base64(&format!("epic:{}", api_secret))); - builder.header(AUTHORIZATION, basic_auth); - } - - builder + let uri = url + .parse::() + .map_err::(|_e: InvalidUri| Error::Argument(format!("Invalid url {}", url)))?; + let mut request = hyper::Request::builder() .method(method) .uri(uri) .header(USER_AGENT, "epic-client") .header(ACCEPT, "application/json") .header(CONTENT_TYPE, "application/json") .body(match body { - None => Body::empty(), + None => hyper::Body::empty(), Some(json) => json.into(), }) - .map_err(|e| { - ErrorKind::RequestError(format!("Bad request {} {}: {}", method, url, e)).into() - }) + .unwrap(); + + /* + .map_err(|e| { + Error::RequestError(format!("Bad request {} {}: {}", method, url, e)).into() + });*/ + if let Some(api_secret) = api_secret { + let basic_auth = format!("Basic {}", to_base64(&format!("epic:{}", api_secret))); + request + .headers_mut() + .insert(AUTHORIZATION, basic_auth.parse().unwrap()); + } + + Ok(request) } pub fn create_post_request( @@ -166,72 +204,69 @@ pub fn create_post_request( where IN: Serialize, { - let json = serde_json::to_string(input).context(ErrorKind::Internal( - "Could not serialize data to JSON".to_owned(), - ))?; + let json = serde_json::to_string(input) + .map_err(|_e| Error::Internal("Could not serialize data to JSON".to_owned()))?; build_request(url, "POST", api_secret, Some(json)) } -fn handle_request(req: Request) -> Result +fn handle_request(req: Request, timeout: TimeOut) -> Result where for<'de> T: Deserialize<'de>, { - let data = send_request(req)?; - serde_json::from_str(&data).map_err(|e| { - e.context(ErrorKind::ResponseError("Cannot parse response".to_owned())) - .into() - }) + let data = send_request(req, timeout)?; + serde_json::from_str(&data) + .map_err(|e| Error::ResponseError(format!("Cannot parse response {}", e))) } -fn handle_request_async(req: Request) -> ClientResponseFuture +async fn handle_request_async(req: Request) -> Result where for<'de> T: Deserialize<'de> + Send + 'static, { - Box::new(send_request_async(req).and_then(|data| { - serde_json::from_str(&data).map_err(|e| { - e.context(ErrorKind::ResponseError("Cannot parse response".to_owned())) - .into() - }) - })) + let data = send_request_async(req, TimeOut::default()).await?; + let ser = serde_json::from_str(&data) + .map_err(|e| Error::ResponseError(format!("Cannot parse response {}", e)))?; + Ok(ser) } -fn send_request_async(req: Request) -> Box + Send> { - let https = hyper_rustls::HttpsConnector::new(1); +async fn send_request_async(req: Request, timeout: TimeOut) -> Result { + let https = hyper_rustls::HttpsConnector::new(); + let (connect, read, write) = ( + Some(timeout.connect), + Some(timeout.read), + Some(timeout.write), + ); 
let mut connector = TimeoutConnector::new(https); - connector.set_connect_timeout(Some(Duration::from_secs(20))); - connector.set_read_timeout(Some(Duration::from_secs(20))); - connector.set_write_timeout(Some(Duration::from_secs(20))); - let client = Client::builder().build::<_, hyper::Body>(connector); - Box::new( - client - .request(req) - .map_err(|e| ErrorKind::RequestError(format!("Cannot make request: {}", e)).into()) - .and_then(|resp| { - if !resp.status().is_success() { - Either::A(err(ErrorKind::RequestError(format!( - "Wrong response code: {} with data {:?}", - resp.status(), - resp.body() - )) - .into())) - } else { - Either::B( - resp.into_body() - .map_err(|e| { - ErrorKind::RequestError(format!("Cannot read response body: {}", e)) - .into() - }) - .concat2() - .and_then(|ch| ok(String::from_utf8_lossy(&ch.to_vec()).to_string())), - ) - } - }), - ) + connector.set_connect_timeout(connect); + connector.set_read_timeout(read); + connector.set_write_timeout(write); + let client = Client::builder().build::<_, Body>(connector); + + let resp = client + .request(req) + .await + .map_err(|e| Error::RequestError(format!("Cannot make request: {}", e)))?; + + if !resp.status().is_success() { + return Err(Error::RequestError(format!( + "Wrong response code: {} with data {:?}", + resp.status(), + resp.body() + )) + .into()); + } + + let raw = body::to_bytes(resp) + .await + .map_err(|e| Error::RequestError(format!("Cannot read response body: {}", e)))?; + + Ok(String::from_utf8_lossy(&raw).to_string()) } -pub fn send_request(req: Request) -> Result { - let task = send_request_async(req); - let mut rt = - Runtime::new().context(ErrorKind::Internal("can't create Tokio runtime".to_owned()))?; - Ok(rt.block_on(task)?) +pub fn send_request(req: Request, timeout: TimeOut) -> Result { + let mut rt = Builder::new() + .basic_scheduler() + .enable_all() + .build() + .map_err(|e| Error::RequestError(format!("{}", e)))?; + rt.block_on(send_request_async(req, timeout)) } diff --git a/api/src/foreign.rs b/api/src/foreign.rs index de7521f7..f55d6e0f 100644 --- a/api/src/foreign.rs +++ b/api/src/foreign.rs @@ -14,7 +14,6 @@ //! Foreign API External Definition -use epic_core::core::TxKernel; use crate::chain::{Chain, SyncState}; use crate::core::core::hash::Hash; use crate::core::core::transaction::Transaction; @@ -23,13 +22,14 @@ use crate::handlers::chain_api::{ChainHandler, KernelHandler, OutputHandler}; use crate::handlers::pool_api::PoolHandler; use crate::handlers::transactions_api::TxHashSetHandler; use crate::handlers::version_api::VersionHandler; -use crate::pool::{self, PoolEntry}; +use crate::pool::{self, BlockChain, PoolAdapter, PoolEntry}; use crate::rest::*; use crate::types::{ BlockHeaderPrintable, BlockPrintable, LocatedTxKernel, OutputListing, OutputPrintable, Tip, Version, }; use crate::util::RwLock; +use epic_core::core::TxKernel; use std::sync::Weak; /// Main interface into all node API functions. @@ -39,13 +39,21 @@ use std::sync::Weak; /// Methods in this API are intended to be 'single use'. /// -pub struct Foreign { +pub struct Foreign +where + B: BlockChain, + P: PoolAdapter, +{ pub chain: Weak, - pub tx_pool: Weak>, + pub tx_pool: Weak>>, pub sync_state: Weak, } -impl Foreign { +impl Foreign +where + B: BlockChain, + P: PoolAdapter, +{ /// Create a new API instance with the chain, transaction pool, peers and `sync_state`. All subsequent /// API calls will operate on this instance of node API. 
/// @@ -61,7 +69,7 @@ impl Foreign { pub fn new( chain: Weak, - tx_pool: Weak>, + tx_pool: Weak>>, sync_state: Weak, ) -> Self { Foreign { @@ -184,17 +192,14 @@ impl Foreign { kernel_handler.get_kernel_v2(excess, min_height, max_height) } - pub fn get_last_n_kernels( - &self, - distance: u64, - ) -> Result, Error>{ + pub fn get_last_n_kernels(&self, distance: u64) -> Result, Error> { let kernel_handler = KernelHandler { - chain: self.chain.clone() + chain: self.chain.clone(), }; let kernels = kernel_handler.get_last_n_kernels(distance); match kernels { Ok(k) => Ok(k), - Err(k) => Err(k) + Err(k) => Err(k), } } diff --git a/api/src/foreign_rpc.rs b/api/src/foreign_rpc.rs index 0aa1dd4a..8a77b3b5 100644 --- a/api/src/foreign_rpc.rs +++ b/api/src/foreign_rpc.rs @@ -17,8 +17,9 @@ use crate::core::core::hash::Hash; use crate::core::core::transaction::Transaction; use crate::foreign::Foreign; -use crate::pool::PoolEntry; -use crate::rest::ErrorKind; + +use crate::pool::{BlockChain, PoolAdapter, PoolEntry}; +use crate::rest::Error; use crate::types::{ BlockHeaderPrintable, BlockPrintable, LocatedTxKernel, OutputListing, OutputPrintable, Tip, Version, @@ -124,7 +125,7 @@ pub trait ForeignRpc: Sync + Send { height: Option, hash: Option, commit: Option, - ) -> Result; + ) -> Result; /** Networked version of [Foreign::get_block](struct.Node.html#method.get_block). @@ -242,7 +243,7 @@ pub trait ForeignRpc: Sync + Send { height: Option, hash: Option, commit: Option, - ) -> Result; + ) -> Result; /* # Json rpc example @@ -264,7 +265,7 @@ pub trait ForeignRpc: Sync + Send { end_height: Option, hash: Option, commit: Option, - ) -> Result, ErrorKind>; + ) -> Result, Error>; /** Networked version of [Foreign::get_version](struct.Node.html#method.get_version). @@ -297,7 +298,7 @@ pub trait ForeignRpc: Sync + Send { # ); ``` */ - fn get_version(&self) -> Result; + fn get_version(&self) -> Result; /** Networked version of [Foreign::get_tip](struct.Node.html#method.get_tip). @@ -332,7 +333,7 @@ pub trait ForeignRpc: Sync + Send { # ); ``` */ - fn get_tip(&self) -> Result; + fn get_tip(&self) -> Result; /** Networked version of [Foreign::get_kernel](struct.Node.html#method.get_kernel). @@ -375,7 +376,7 @@ pub trait ForeignRpc: Sync + Send { excess: String, min_height: Option, max_height: Option, - ) -> Result; + ) -> Result; /* # Json rpc example @@ -410,7 +411,7 @@ pub trait ForeignRpc: Sync + Send { ``` */ - fn get_last_n_kernels(&self, distance: u64) -> Result, ErrorKind>; + fn get_last_n_kernels(&self, distance: u64) -> Result, Error>; /** Networked version of [Foreign::get_outputs](struct.Node.html#method.get_outputs). @@ -497,7 +498,7 @@ pub trait ForeignRpc: Sync + Send { end_height: Option, include_proof: Option, include_merkle_proof: Option, - ) -> Result, ErrorKind>; + ) -> Result, Error>; /** Networked version of [Foreign::get_unspent_outputs](struct.Node.html#method.get_unspent_outputs). @@ -558,7 +559,7 @@ pub trait ForeignRpc: Sync + Send { end_index: Option, max: u64, include_proof: Option, - ) -> Result; + ) -> Result; /** Networked version of [Foreign::get_pmmr_indices](struct.Node.html#method.get_pmmr_indices). @@ -595,7 +596,7 @@ pub trait ForeignRpc: Sync + Send { &self, start_block_height: u64, end_block_height: Option, - ) -> Result; + ) -> Result; /** Networked version of [Foreign::get_pool_size](struct.Node.html#method.get_pool_size). 
@@ -625,7 +626,7 @@ pub trait ForeignRpc: Sync + Send { # ); ``` */ - fn get_pool_size(&self) -> Result; + fn get_pool_size(&self) -> Result; /** Networked version of [Foreign::get_stempool_size](struct.Node.html#method.get_stempool_size). @@ -655,7 +656,7 @@ pub trait ForeignRpc: Sync + Send { # ); ``` */ - fn get_stempool_size(&self) -> Result; + fn get_stempool_size(&self) -> Result; /** Networked version of [Foreign::get_unconfirmed_transactions](struct.Node.html#method.get_unconfirmed_transactions). @@ -728,7 +729,7 @@ pub trait ForeignRpc: Sync + Send { # ); ``` */ - fn get_unconfirmed_transactions(&self) -> Result, ErrorKind>; + fn get_unconfirmed_transactions(&self) -> Result, Error>; /** Networked version of [Foreign::push_transaction](struct.Node.html#method.push_transaction). @@ -793,23 +794,27 @@ pub trait ForeignRpc: Sync + Send { # ); ``` */ - fn push_transaction(&self, tx: Transaction, fluff: Option) -> Result<(), ErrorKind>; + fn push_transaction(&self, tx: Transaction, fluff: Option) -> Result<(), Error>; } -impl ForeignRpc for Foreign { +impl ForeignRpc for Foreign +where + B: BlockChain, + P: PoolAdapter, +{ fn get_header( &self, height: Option, hash: Option, commit: Option, - ) -> Result { + ) -> Result { let mut parsed_hash: Option = None; if let Some(hash) = hash { let vec = util::from_hex(hash) - .map_err(|e| ErrorKind::Argument(format!("invalid block hash: {}", e)))?; + .map_err(|e| Error::Argument(format!("invalid block hash: {}", e)))?; parsed_hash = Some(Hash::from_vec(&vec)); } - Foreign::get_header(self, height, parsed_hash, commit).map_err(|e| e.kind().clone()) + Foreign::get_header(self, height, parsed_hash, commit) } fn get_block( @@ -817,14 +822,14 @@ impl ForeignRpc for Foreign { height: Option, hash: Option, commit: Option, - ) -> Result { + ) -> Result { let mut parsed_hash: Option = None; if let Some(hash) = hash { let vec = util::from_hex(hash) - .map_err(|e| ErrorKind::Argument(format!("invalid block hash: {}", e)))?; + .map_err(|e| Error::Argument(format!("invalid block hash: {}", e)))?; parsed_hash = Some(Hash::from_vec(&vec)); } - Foreign::get_block(self, height, parsed_hash, commit).map_err(|e| e.kind().clone()) + Foreign::get_block(self, height, parsed_hash, commit) } fn get_blocks( @@ -833,24 +838,24 @@ impl ForeignRpc for Foreign { end_height: Option, hash: Option, commit: Option, - ) -> Result, ErrorKind> { + ) -> Result, Error> { if Some(start_height) > Some(end_height) { - return Err(ErrorKind::Argument( + return Err(Error::Argument( "Start_height must be lower or equal than end_height".to_string(), )); } let mut parsed_hash: Option = None; if let Some(hash) = hash { let vec = util::from_hex(hash) - .map_err(|e| ErrorKind::Argument(format!("invalid block hash: {}", e)))?; + .map_err(|e| Error::Argument(format!("invalid block hash: {}", e)))?; parsed_hash = Some(Hash::from_vec(&vec)); } if let Some(start_height) = start_height { if let Some(end_height) = end_height { let mut blocks: Vec = vec![]; for height in start_height..=end_height { - let block = Foreign::get_block(self, Some(height), parsed_hash, commit.clone()) - .map_err(|e| e.kind().clone()); + let block = Foreign::get_block(self, Some(height), parsed_hash, commit.clone()); + match block { Ok(b) => blocks.push(b), Err(_) => (), @@ -859,24 +864,24 @@ impl ForeignRpc for Foreign { return Ok(blocks); } } - return Err(ErrorKind::Argument( + return Err(Error::Argument( "Start_height or end_height is not valid".to_string(), )); } - fn get_last_n_kernels(&self, distance: u64) -> Result, 
ErrorKind> { + fn get_last_n_kernels(&self, distance: u64) -> Result, Error> { match Foreign::get_last_n_kernels(self, distance) { Ok(k) => Ok(k), - Err(_) => Err(ErrorKind::Argument("Could not get kernels".to_string())), + Err(_) => Err(Error::Argument("Could not get kernels".to_string())), } } - fn get_version(&self) -> Result { - Foreign::get_version(self).map_err(|e| e.kind().clone()) + fn get_version(&self) -> Result { + Foreign::get_version(self) } - fn get_tip(&self) -> Result { - Foreign::get_tip(self).map_err(|e| e.kind().clone()) + fn get_tip(&self) -> Result { + Foreign::get_tip(self) } fn get_kernel( @@ -884,8 +889,8 @@ impl ForeignRpc for Foreign { excess: String, min_height: Option, max_height: Option, - ) -> Result { - Foreign::get_kernel(self, excess, min_height, max_height).map_err(|e| e.kind().clone()) + ) -> Result { + Foreign::get_kernel(self, excess, min_height, max_height) } fn get_outputs( @@ -895,7 +900,7 @@ impl ForeignRpc for Foreign { end_height: Option, include_proof: Option, include_merkle_proof: Option, - ) -> Result, ErrorKind> { + ) -> Result, Error> { Foreign::get_outputs( self, commits, @@ -904,7 +909,6 @@ impl ForeignRpc for Foreign { include_proof, include_merkle_proof, ) - .map_err(|e| e.kind().clone()) } fn get_unspent_outputs( @@ -913,33 +917,31 @@ impl ForeignRpc for Foreign { end_index: Option, max: u64, include_proof: Option, - ) -> Result { + ) -> Result { Foreign::get_unspent_outputs(self, start_index, end_index, max, include_proof) - .map_err(|e| e.kind().clone()) } fn get_pmmr_indices( &self, start_block_height: u64, end_block_height: Option, - ) -> Result { + ) -> Result { Foreign::get_pmmr_indices(self, start_block_height, end_block_height) - .map_err(|e| e.kind().clone()) } - fn get_pool_size(&self) -> Result { - Foreign::get_pool_size(self).map_err(|e| e.kind().clone()) + fn get_pool_size(&self) -> Result { + Foreign::get_pool_size(self) } - fn get_stempool_size(&self) -> Result { - Foreign::get_stempool_size(self).map_err(|e| e.kind().clone()) + fn get_stempool_size(&self) -> Result { + Foreign::get_stempool_size(self) } - fn get_unconfirmed_transactions(&self) -> Result, ErrorKind> { - Foreign::get_unconfirmed_transactions(self).map_err(|e| e.kind().clone()) + fn get_unconfirmed_transactions(&self) -> Result, Error> { + Foreign::get_unconfirmed_transactions(self) } - fn push_transaction(&self, tx: Transaction, fluff: Option) -> Result<(), ErrorKind> { - Foreign::push_transaction(self, tx, fluff).map_err(|e| e.kind().clone()) + fn push_transaction(&self, tx: Transaction, fluff: Option) -> Result<(), Error> { + Foreign::push_transaction(self, tx, fluff) } } diff --git a/api/src/handlers.rs b/api/src/handlers.rs index a5defe48..f4f06cac 100644 --- a/api/src/handlers.rs +++ b/api/src/handlers.rs @@ -50,33 +50,43 @@ use crate::owner_rpc::OwnerRpc; use crate::p2p; use crate::pool; use crate::rest::{ApiServer, Error, TLSConfig}; -use crate::router::ResponseFuture; -use crate::router::{Router, RouterError}; + +use crate::router::{ResponseFuture, Router}; use crate::util::to_base64; use crate::util::RwLock; +use crate::util::StopState; use crate::web::*; use easy_jsonrpc_mw::{Handler, MaybeReply}; -use futures::future::ok; -use futures::Future; + +use futures::channel::oneshot; + use hyper::{Body, Request, Response, StatusCode}; -use serde::Serialize; + use std::net::SocketAddr; use std::sync::{Arc, Weak}; +use crate::pool::{BlockChain, PoolAdapter}; + +use std::thread; + /// Listener version, providing same API but listening for requests on a /// 
port and wrapping the calls -pub fn node_apis( +pub fn node_apis( addr: &str, chain: Arc, - tx_pool: Arc>, + tx_pool: Arc>>, peers: Arc, sync_state: Arc, api_secret: Option, foreign_api_secret: Option, tls_config: Option, -) -> Result<(), Error> { - // Manually build router when getting rid of v1 - //let mut router = Router::new(); + api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>), + stop_state: Arc, +) -> Result<(), Error> +where + B: BlockChain + 'static, + P: PoolAdapter + 'static, +{ let mut router = build_router( chain.clone(), tx_pool.clone(), @@ -85,10 +95,12 @@ ) .expect("unable to build API router"); - // Add basic auth to v1 API and owner v2 API + //let mut router = Router::new(); + + // Add basic auth to v2 owner API if let Some(api_secret) = api_secret { let api_basic_auth = - "Basic ".to_string() + &to_base64(&("epic:".to_string() + &api_secret)); + "Basic ".to_string() + &to_base64(&("epic:".to_string() + &api_secret)); let basic_auth_middleware = Arc::new(BasicAuthMiddleware::new( api_basic_auth, &EPIC_BASIC_REALM, @@ -97,17 +109,17 @@ router.add_middleware(basic_auth_middleware); } - let api_handler_v2 = OwnerAPIHandlerV2::new( + let api_handler = OwnerAPIHandlerV2::new( Arc::downgrade(&chain), Arc::downgrade(&peers), Arc::downgrade(&sync_state), ); - router.add_route("/v2/owner", Arc::new(api_handler_v2))?; + router.add_route("/v2/owner", Arc::new(api_handler))?; - // Add basic auth to v2 foreign API only + // Add basic auth to v2 foreign API if let Some(api_secret) = foreign_api_secret { let api_basic_auth = - "Basic ".to_string() + &to_base64(&("epic:".to_string() + &api_secret)); + "Basic ".to_string() + &to_base64(&("epic:".to_string() + &api_secret)); let basic_auth_middleware = Arc::new(BasicAuthURIMiddleware::new( api_basic_auth, &EPIC_FOREIGN_BASIC_REALM, @@ -116,19 +128,33 @@ router.add_middleware(basic_auth_middleware); } - let api_handler_v2 = ForeignAPIHandlerV2::new( + let api_handler = ForeignAPIHandlerV2::new( Arc::downgrade(&chain), Arc::downgrade(&tx_pool), Arc::downgrade(&sync_state), ); - router.add_route("/v2/foreign", Arc::new(api_handler_v2))?; + router.add_route("/v2/foreign", Arc::new(api_handler))?; let mut apis = ApiServer::new(); - info!("Starting HTTP Node APIs server at {}.", addr); + warn!("Starting HTTP Node APIs server at {}.", addr); let socket_addr: SocketAddr = addr.parse().expect("unable to parse socket address"); - let api_thread = apis.start(socket_addr, router, tls_config); - - info!("HTTP Node listener started."); + let api_thread = apis.start(socket_addr, router, tls_config, api_chan); + + warn!("HTTP Node listener started."); + + thread::Builder::new() + .name("api_monitor".to_string()) + .spawn(move || { + // monitor for stop state is_stopped + loop { + std::thread::sleep(std::time::Duration::from_millis(100)); + if stop_state.is_stopped() { + apis.stop(); + break; + } + } + }) + .ok(); match api_thread { Ok(_) => Ok(()), @@ -139,8 +165,6 @@ } } -type NodeResponseFuture = Box, Error = Error> + Send>; - /// V2 API Handler/Wrapper for owner functions pub struct OwnerAPIHandlerV2 { pub chain: Weak, @@ -157,67 +181,64 @@ sync_state, } } +} - fn call_api( - &self, - req: Request, - api: Owner, - ) -> Box + Send> { - Box::new(parse_body(req).and_then(move |val: serde_json::Value| { - let owner_api = &api as &dyn OwnerRpc; - match owner_api.handle_request(val) { - MaybeReply::Reply(r) => ok(r), - MaybeReply::DontReply => {
- // Since it's http, we need to return something. We return [] because jsonrpc - // clients will parse it as an empty batch response. - ok(serde_json::json!([])) - } - } - })) - } - - fn handle_post_request(&self, req: Request) -> NodeResponseFuture { +impl crate::router::Handler for OwnerAPIHandlerV2 { + fn post(&self, req: Request) -> ResponseFuture { let api = Owner::new( self.chain.clone(), self.peers.clone(), self.sync_state.clone(), ); - Box::new( - self.call_api(req, api) - .and_then(|resp| ok(json_response_pretty(&resp))), - ) - } -} -impl crate::router::Handler for OwnerAPIHandlerV2 { - fn post(&self, req: Request) -> ResponseFuture { - Box::new( - self.handle_post_request(req) - .and_then(|r| ok(r)) - .or_else(|e| { + Box::pin(async move { + match parse_body(req).await { + Ok(val) => { + let owner_api = &api as &dyn OwnerRpc; + let res = match owner_api.handle_request(val) { + MaybeReply::Reply(r) => r, + MaybeReply::DontReply => { + // Since it's http, we need to return something. We return [] because jsonrpc + // clients will parse it as an empty batch response. + serde_json::json!([]) + } + }; + Ok(json_response_pretty(&res)) + } + Err(e) => { error!("Request Error: {:?}", e); - ok(create_error_response(e)) - }), - ) + Ok(create_error_response(e)) + } + } + }) } fn options(&self, _req: Request) -> ResponseFuture { - Box::new(ok(create_ok_response("{}"))) + Box::pin(async { Ok(create_ok_response("{}")) }) } } /// V2 API Handler/Wrapper for foreign functions -pub struct ForeignAPIHandlerV2 { +/// V2 API Handler/Wrapper for foreign functions +pub struct ForeignAPIHandlerV2 +where + B: BlockChain, + P: PoolAdapter, +{ pub chain: Weak, - pub tx_pool: Weak>, + pub tx_pool: Weak>>, pub sync_state: Weak, } -impl ForeignAPIHandlerV2 { +impl ForeignAPIHandlerV2 +where + B: BlockChain, + P: PoolAdapter, +{ /// Create a new foreign API handler for GET methods pub fn new( chain: Weak, - tx_pool: Weak>, + tx_pool: Weak>>, sync_state: Weak, ) -> Self { ForeignAPIHandlerV2 { @@ -226,68 +247,58 @@ impl ForeignAPIHandlerV2 { sync_state, } } +} - fn call_api( - &self, - req: Request, - api: Foreign, - ) -> Box + Send> { - Box::new(parse_body(req).and_then(move |val: serde_json::Value| { - let foreign_api = &api as &dyn ForeignRpc; - match foreign_api.handle_request(val) { - MaybeReply::Reply(r) => ok(r), - MaybeReply::DontReply => { - // Since it's http, we need to return something. We return [] because jsonrpc - // clients will parse it as an empty batch response. - ok(serde_json::json!([])) - } - } - })) - } - - fn handle_post_request(&self, req: Request) -> NodeResponseFuture { +impl crate::router::Handler for ForeignAPIHandlerV2 +where + B: BlockChain + 'static, + P: PoolAdapter + 'static, +{ + fn post(&self, req: Request) -> ResponseFuture { let api = Foreign::new( self.chain.clone(), self.tx_pool.clone(), self.sync_state.clone(), ); - Box::new( - self.call_api(req, api) - .and_then(|resp| ok(json_response_pretty(&resp))), - ) - } -} -impl crate::router::Handler for ForeignAPIHandlerV2 { - fn post(&self, req: Request) -> ResponseFuture { - Box::new( - self.handle_post_request(req) - .and_then(|r| ok(r)) - .or_else(|e| { + Box::pin(async move { + match parse_body(req).await { + Ok(val) => { + let foreign_api = &api as &dyn ForeignRpc; + let res = match foreign_api.handle_request(val) { + MaybeReply::Reply(r) => r, + MaybeReply::DontReply => { + // Since it's http, we need to return something. We return [] because jsonrpc + // clients will parse it as an empty batch response. 
+ serde_json::json!([]) + } + }; + Ok(json_response_pretty(&res)) + } + Err(e) => { error!("Request Error: {:?}", e); - ok(create_error_response(e)) - }), - ) + Ok(create_error_response(e)) + } + } + }) } fn options(&self, _req: Request) -> ResponseFuture { - Box::new(ok(create_ok_response("{}"))) + Box::pin(async { Ok(create_ok_response("{}")) }) } } // pretty-printed version of above -fn json_response_pretty(s: &T) -> Response -where - T: Serialize, -{ - match serde_json::to_string_pretty(s) { - Ok(json) => response(StatusCode::OK, json), +fn json_response_pretty(to_string: &serde_json::Value) -> Response { + let json = serde_json::to_string_pretty(to_string); + match json { + Ok(value) => response(StatusCode::OK, value), Err(_) => response(StatusCode::INTERNAL_SERVER_ERROR, ""), } } fn create_error_response(e: Error) -> Response { - Response::builder() + hyper::Response::builder() .status(StatusCode::INTERNAL_SERVER_ERROR) .header("access-control-allow-origin", "*") .header( @@ -299,7 +310,7 @@ fn create_error_response(e: Error) -> Response { } fn create_ok_response(json: &str) -> Response { - Response::builder() + hyper::Response::builder() .status(StatusCode::OK) .header("access-control-allow-origin", "*") .header( @@ -316,9 +327,7 @@ fn create_ok_response(json: &str) -> Response { /// Whenever the status code is `StatusCode::OK` the text parameter should be /// valid JSON as the content type header will be set to `application/json' fn response>(status: StatusCode, text: T) -> Response { - let mut builder = &mut Response::builder(); - - builder = builder + let mut res = hyper::Response::builder() .status(status) .header("access-control-allow-origin", "*") .header( @@ -327,19 +336,23 @@ fn response>(status: StatusCode, text: T) -> Response { ); if status == StatusCode::OK { - builder = builder.header(hyper::header::CONTENT_TYPE, "application/json"); + res = res.header(hyper::header::CONTENT_TYPE, "application/json"); } - builder.body(text.into()).unwrap() + res.body(text.into()).unwrap() } // Legacy V1 router -pub fn build_router( +pub fn build_router( chain: Arc, - tx_pool: Arc>, + tx_pool: Arc>>, peers: Arc, sync_state: Arc, -) -> Result { +) -> Result +where + B: BlockChain + 'static, + P: PoolAdapter + 'static, +{ let route_list = vec![ "get blocks".to_string(), "get headers".to_string(), diff --git a/api/src/handlers/blocks_api.rs b/api/src/handlers/blocks_api.rs index e556b130..1210eacc 100644 --- a/api/src/handlers/blocks_api.rs +++ b/api/src/handlers/blocks_api.rs @@ -16,24 +16,26 @@ use crate::chain; use crate::core::core::hash::Hash; use crate::core::core::hash::Hashed; use crate::rest::*; -use crate::router::{Handler, ResponseFuture}; + use crate::types::*; use crate::util; use crate::web::*; -use failure::ResultExt; -use hyper::{Body, Request, StatusCode}; + use regex::Regex; use std::sync::Weak; +use crate::router::{Handler, ResponseFuture}; + +use hyper::{Body, Request, StatusCode}; + +pub struct HeaderHandler { + pub chain: Weak, +} /// Gets block headers given either a hash or height or an output commit. 
/// GET /v1/headers/ /// GET /v1/headers/ /// GET /v1/headers/ /// -pub struct HeaderHandler { - pub chain: Weak, -} - impl HeaderHandler { fn get_header(&self, input: String) -> Result { // will fail quick if the provided isn't a commitment @@ -43,16 +45,16 @@ impl HeaderHandler { if let Ok(height) = input.parse() { match w(&self.chain)?.get_header_by_height(height) { Ok(header) => return Ok(BlockHeaderPrintable::from_header(&header)), - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, } } check_block_param(&input)?; - let vec = util::from_hex(input) - .map_err(|e| ErrorKind::Argument(format!("invalid input: {}", e)))?; + let vec = + util::from_hex(input).map_err(|e| Error::Argument(format!("invalid input: {}", e)))?; let h = Hash::from_vec(&vec); let header = w(&self.chain)? .get_block_header(&h) - .context(ErrorKind::NotFound)?; + .map_err(|_e| Error::NotFound)?; Ok(BlockHeaderPrintable::from_header(&header)) } @@ -60,13 +62,13 @@ impl HeaderHandler { let oid = get_output(&self.chain, &commit_id)?.1; match w(&self.chain)?.get_header_for_output(&oid) { Ok(header) => Ok(BlockHeaderPrintable::from_header(&header)), - Err(_) => Err(ErrorKind::NotFound)?, + Err(_) => Err(Error::NotFound)?, } } pub fn get_header_v2(&self, h: &Hash) -> Result { let chain = w(&self.chain)?; - let header = chain.get_block_header(h).context(ErrorKind::NotFound)?; + let header = chain.get_block_header(h).map_err(|_e| Error::NotFound)?; return Ok(BlockHeaderPrintable::from_header(&header)); } @@ -80,7 +82,7 @@ impl HeaderHandler { if let Some(height) = height { match w(&self.chain)?.get_header_by_height(height) { Ok(header) => return Ok(header.hash()), - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, } } if let Some(hash) = hash { @@ -90,10 +92,10 @@ impl HeaderHandler { let oid = get_output_v2(&self.chain, &commit, false, false)?.1; match w(&self.chain)?.get_header_for_output(&oid) { Ok(header) => return Ok(header.hash()), - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, } } - return Err(ErrorKind::Argument( + return Err(Error::Argument( "not a valid hash, height or output commit".to_owned(), ))?; } @@ -102,7 +104,8 @@ impl HeaderHandler { impl Handler for HeaderHandler { fn get(&self, req: Request) -> ResponseFuture { let el = right_path_element!(req); - result_to_response(self.get_header(el.to_string())) + let header = self.get_header(el.to_string()); + result_to_response(header) } } @@ -128,16 +131,16 @@ impl BlockHandler { include_merkle_proof: bool, ) -> Result { let chain = w(&self.chain)?; - let block = chain.get_block(h).context(ErrorKind::NotFound)?; + let block = chain.get_block(h).map_err(|_e| Error::NotFound)?; BlockPrintable::from_block(&block, chain, include_proof, include_merkle_proof) - .map_err(|_| ErrorKind::Internal("chain error".to_owned()).into()) + .map_err(|_| Error::Internal("chain error".to_owned()).into()) } fn get_compact_block(&self, h: &Hash) -> Result { let chain = w(&self.chain)?; - let block = chain.get_block(h).context(ErrorKind::NotFound)?; + let block = chain.get_block(h).map_err(|_e| Error::NotFound)?; CompactBlockPrintable::from_compact_block(&block.into(), chain) - .map_err(|_| ErrorKind::Internal("chain error".to_owned()).into()) + .map_err(|_| Error::Internal("chain error".to_owned()).into()) } // Try to decode the string as a height or a hash. 
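// --- Illustrative sketch, not part of the patch -----------------------------------
// The handler edits above all follow one error-mapping convention: where the old
// `failure`-based code attached context with `.context(ErrorKind::NotFound)`, the new
// code builds the plain `Error` enum (rewritten with `thiserror` in api/src/rest.rs
// further down in this diff) directly inside a `map_err` closure. A hypothetical
// helper showing that shape, assuming the crate's `chain`, `rest::Error` and
// `BlockHeaderPrintable` types as used in the hunks above:
use crate::core::core::hash::Hash;
use crate::rest::Error;
use crate::types::BlockHeaderPrintable;

fn header_printable(chain: &crate::chain::Chain, h: &Hash) -> Result<BlockHeaderPrintable, Error> {
	let header = chain
		.get_block_header(h)
		// Collapse any chain-level failure into the API-level `NotFound` variant,
		// mirroring `HeaderHandler::get_header_v2` above.
		.map_err(|_| Error::NotFound)?;
	Ok(BlockHeaderPrintable::from_header(&header))
}
// -----------------------------------------------------------------------------------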
@@ -145,12 +148,12 @@ impl BlockHandler { if let Ok(height) = input.parse() { match w(&self.chain)?.get_header_by_height(height) { Ok(header) => return Ok(header.hash()), - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, } } check_block_param(&input)?; - let vec = util::from_hex(input) - .map_err(|e| ErrorKind::Argument(format!("invalid input: {}", e)))?; + let vec = + util::from_hex(input).map_err(|e| Error::Argument(format!("invalid input: {}", e)))?; Ok(Hash::from_vec(&vec)) } @@ -164,7 +167,7 @@ impl BlockHandler { if let Some(height) = height { match w(&self.chain)?.get_header_by_height(height) { Ok(header) => return Ok(header.hash()), - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, } } if let Some(hash) = hash { @@ -174,10 +177,10 @@ impl BlockHandler { let oid = get_output_v2(&self.chain, &commit, false, false)?.1; match w(&self.chain)?.get_header_for_output(&oid) { Ok(header) => return Ok(header.hash()), - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, } } - return Err(ErrorKind::Argument( + return Err(Error::Argument( "not a valid hash, height or output commit".to_owned(), ))?; } @@ -188,9 +191,7 @@ fn check_block_param(input: &String) -> Result<(), Error> { static ref RE: Regex = Regex::new(r"[0-9a-fA-F]{64}").unwrap(); } if !RE.is_match(&input) { - return Err(ErrorKind::Argument( - "Not a valid hash or height.".to_owned(), - ))?; + return Err(Error::Argument("Not a valid hash or height.".to_owned()))?; } Ok(()) } diff --git a/api/src/handlers/chain_api.rs b/api/src/handlers/chain_api.rs index 5e0b5efb..b41b91a2 100644 --- a/api/src/handlers/chain_api.rs +++ b/api/src/handlers/chain_api.rs @@ -22,7 +22,7 @@ use crate::util; use crate::util::secp::pedersen::Commitment; use crate::web::*; use epic_core::core::TxKernel; -use failure::ResultExt; + use hyper::{Body, Request, StatusCode}; use std::sync::Weak; @@ -36,7 +36,7 @@ impl ChainHandler { pub fn get_tip(&self) -> Result { let head = w(&self.chain)? .head() - .map_err(|e| ErrorKind::Internal(format!("can't get head: {}", e)))?; + .map_err(|e| Error::Internal(format!("can't get head: {}", e)))?; Ok(Tip::from_tip(head)) } } @@ -57,7 +57,7 @@ impl ChainValidationHandler { pub fn validate_chain(&self) -> Result<(), Error> { w(&self.chain)? .validate(true) - .map_err(|_| ErrorKind::Internal("chain error".to_owned()).into()) + .map_err(|_| Error::Internal("chain error".to_owned()).into()) } } @@ -84,7 +84,7 @@ impl ChainCompactHandler { pub fn compact_chain(&self) -> Result<(), Error> { w(&self.chain)? 
.compact() - .map_err(|_| ErrorKind::Internal("chain error".to_owned()).into()) + .map_err(|_| Error::Internal("chain error".to_owned()).into()) } } @@ -137,7 +137,7 @@ impl OutputHandler { // First check the commits length for commit in &commits { if commit.len() != 66 { - return Err(ErrorKind::RequestError(format!( + return Err(Error::RequestError(format!( "invalid commit length for {}", commit )) @@ -189,7 +189,7 @@ impl OutputHandler { let chain = w(&self.chain)?; let outputs = chain .unspent_outputs_by_pmmr_index(start_index, max, end_index) - .context(ErrorKind::NotFound)?; + .map_err(|_e| Error::NotFound)?; let out = OutputListing { last_retrieved_index: outputs.0, highest_index: outputs.1, @@ -206,7 +206,7 @@ impl OutputHandler { ) }) .collect::, _>>() - .context(ErrorKind::Internal("chain error".to_owned()))?, + .map_err(|_e| Error::Internal("chain error".to_owned()))?, }; Ok(out) } @@ -239,14 +239,14 @@ impl OutputHandler { ) -> Result { let header = w(&self.chain)? .get_header_by_height(block_height) - .map_err(|_| ErrorKind::NotFound)?; + .map_err(|_| Error::NotFound)?; // TODO - possible to compact away blocks we care about // in the period between accepting the block and refreshing the wallet let chain = w(&self.chain)?; let block = chain .get_block(&header.hash()) - .map_err(|_| ErrorKind::NotFound)?; + .map_err(|_| Error::NotFound)?; let outputs = block .outputs() .iter() @@ -261,11 +261,11 @@ impl OutputHandler { ) }) .collect::, _>>() - .context(ErrorKind::Internal("cain error".to_owned()))?; + .map_err(|_e| Error::Internal("cain error".to_owned()))?; Ok(BlockOutputs { header: BlockHeaderInfo::from_header(&header), - outputs: outputs, + outputs, }) } @@ -278,14 +278,14 @@ impl OutputHandler { ) -> Result, Error> { let header = w(&self.chain)? 
.get_header_by_height(block_height) - .map_err(|_| ErrorKind::NotFound)?; + .map_err(|_| Error::NotFound)?; // TODO - possible to compact away blocks we care about // in the period between accepting the block and refreshing the wallet let chain = w(&self.chain)?; let block = chain .get_block(&header.hash()) - .map_err(|_| ErrorKind::NotFound)?; + .map_err(|_| Error::NotFound)?; let outputs = block .outputs() .iter() @@ -300,7 +300,7 @@ impl OutputHandler { ) }) .collect::, _>>() - .context(ErrorKind::Internal("cain error".to_owned()))?; + .map_err(|_e| Error::Internal("cain error".to_owned()))?; Ok(outputs) } @@ -395,11 +395,11 @@ impl KernelHandler { .trim_end_matches('/') .rsplit('/') .next() - .ok_or(ErrorKind::RequestError("missing excess".into()))?; + .ok_or(Error::RequestError("missing excess".into()))?; let excess = util::from_hex(excess.to_owned()) - .map_err(|_| ErrorKind::RequestError("invalid excess hex".into()))?; + .map_err(|_| Error::RequestError("invalid excess hex".into()))?; if excess.len() != 33 { - return Err(ErrorKind::RequestError("invalid excess length".into()).into()); + return Err(Error::RequestError("invalid excess length".into()).into()); } let excess = Commitment::from_vec(excess); @@ -414,18 +414,18 @@ impl KernelHandler { if let Some(h) = params.get("min_height") { let h = h .parse() - .map_err(|_| ErrorKind::RequestError("invalid minimum height".into()))?; + .map_err(|_| Error::RequestError("invalid minimum height".into()))?; // Default is genesis min_height = if h == 0 { None } else { Some(h) }; } if let Some(h) = params.get("max_height") { let h = h .parse() - .map_err(|_| ErrorKind::RequestError("invalid maximum height".into()))?; + .map_err(|_| Error::RequestError("invalid maximum height".into()))?; // Default is current head let head_height = chain .head() - .map_err(|e| ErrorKind::Internal(format!("{}", e)))? + .map_err(|e| Error::Internal(format!("{}", e)))? .height; max_height = if h >= head_height { None } else { Some(h) }; } @@ -433,7 +433,7 @@ impl KernelHandler { let kernel = chain .get_kernel_height(&excess, min_height, max_height) - .map_err(|e| ErrorKind::Internal(format!("{}", e)))? + .map_err(|e| Error::Internal(format!("{}", e)))? .map(|(tx_kernel, height, mmr_index)| LocatedTxKernel { tx_kernel, height, @@ -449,35 +449,32 @@ impl KernelHandler { max_height: Option, ) -> Result { let excess = util::from_hex(excess.to_owned()) - .map_err(|_| ErrorKind::RequestError("invalid excess hex".into()))?; + .map_err(|_| Error::RequestError("invalid excess hex".into()))?; if excess.len() != 33 { - return Err(ErrorKind::RequestError("invalid excess length".into()).into()); + return Err(Error::RequestError("invalid excess length".into()).into()); } let excess = Commitment::from_vec(excess); let chain = w(&self.chain)?; let kernel = chain .get_kernel_height(&excess, min_height, max_height) - .map_err(|e| ErrorKind::Internal(format!("{}", e)))? + .map_err(|e| Error::Internal(format!("{}", e)))? 
.map(|(tx_kernel, height, mmr_index)| LocatedTxKernel { tx_kernel, height, mmr_index, }); - kernel.ok_or_else(|| ErrorKind::NotFound.into()) + kernel.ok_or_else(|| Error::NotFound.into()) } - pub fn get_last_n_kernels( - &self, - distance: u64, - ) -> Result, Error> { + pub fn get_last_n_kernels(&self, distance: u64) -> Result, Error> { let chain = w(&self.chain)?; let kernels = chain.get_last_n_kernel(distance); let mut tx_kernels: Vec = Vec::new(); for k in &kernels { let tx_kernel = k.1.clone(); tx_kernels.push(tx_kernel); - }; + } return Ok(tx_kernels); } } diff --git a/api/src/handlers/peers_api.rs b/api/src/handlers/peers_api.rs index 9469ef27..6c8f40c2 100644 --- a/api/src/handlers/peers_api.rs +++ b/api/src/handlers/peers_api.rs @@ -18,6 +18,7 @@ use crate::p2p::{self, PeerData}; use crate::rest::*; use crate::router::{Handler, ResponseFuture}; use crate::web::*; + use hyper::{Body, Request, StatusCode}; use std::net::SocketAddr; use std::sync::Weak; @@ -72,7 +73,7 @@ impl PeerHandler { if let Some(addr) = addr { let peer_addr = PeerAddr(addr); let peer_data: PeerData = w(&self.peers)?.get_peer(peer_addr).map_err(|e| { - let e: Error = ErrorKind::Internal(format!("get peer error: {:?}", e)).into(); + let e: Error = Error::Internal(format!("get peer error: {:?}", e)).into(); e })?; return Ok(vec![peer_data]); @@ -85,14 +86,14 @@ impl PeerHandler { let peer_addr = PeerAddr(addr); w(&self.peers)? .ban_peer(peer_addr, ReasonForBan::ManualBan) - .map_err(|e| ErrorKind::Internal(format!("ban peer error: {:?}", e)).into()) + .map_err(|e| Error::Internal(format!("ban peer error: {:?}", e))) } pub fn unban_peer(&self, addr: SocketAddr) -> Result<(), Error> { let peer_addr = PeerAddr(addr); w(&self.peers)? .unban_peer(peer_addr) - .map_err(|e| ErrorKind::Internal(format!("unban peer error: {:?}", e)).into()) + .map_err(|e| Error::Internal(format!("unban peer error: {:?}", e))) } } @@ -120,6 +121,7 @@ impl Handler for PeerHandler { Err(_) => response(StatusCode::NOT_FOUND, "peer not found"), } } + fn post(&self, req: Request) -> ResponseFuture { let mut path_elems = req.uri().path().trim_end_matches('/').rsplit('/'); let command = match path_elems.next() { diff --git a/api/src/handlers/pool_api.rs b/api/src/handlers/pool_api.rs index 2877739d..e871afd1 100644 --- a/api/src/handlers/pool_api.rs +++ b/api/src/handlers/pool_api.rs @@ -16,26 +16,33 @@ use super::utils::w; use crate::core::core::hash::Hashed; use crate::core::core::Transaction; use crate::core::ser::{self, ProtocolVersion}; -use crate::pool::{self, PoolEntry}; + +use crate::pool::{self, BlockChain, PoolAdapter, PoolEntry}; use crate::rest::*; use crate::router::{Handler, ResponseFuture}; use crate::types::*; use crate::util; use crate::util::RwLock; use crate::web::*; -use failure::ResultExt; -use futures::future::{err, ok}; -use futures::Future; + use hyper::{Body, Request, StatusCode}; use std::sync::Weak; /// Get basic information about the transaction pool. 
/// GET /v1/pool -pub struct PoolInfoHandler { - pub tx_pool: Weak>, +pub struct PoolInfoHandler +where + B: BlockChain, + P: PoolAdapter, +{ + pub tx_pool: Weak>>, } -impl Handler for PoolInfoHandler { +impl Handler for PoolInfoHandler +where + B: BlockChain, + P: PoolAdapter, +{ fn get(&self, _req: Request) -> ResponseFuture { let pool_arc = w_fut!(&self.tx_pool); let pool = pool_arc.read(); @@ -46,11 +53,19 @@ impl Handler for PoolInfoHandler { } } -pub struct PoolHandler { - pub tx_pool: Weak>, +pub struct PoolHandler +where + B: BlockChain, + P: PoolAdapter, +{ + pub tx_pool: Weak>>, } -impl PoolHandler { +impl PoolHandler +where + B: BlockChain, + P: PoolAdapter, +{ pub fn get_pool_size(&self) -> Result { let pool_arc = w(&self.tx_pool)?; let pool = pool_arc.read(); @@ -83,10 +98,10 @@ impl PoolHandler { let header = tx_pool .blockchain .chain_head() - .context(ErrorKind::Internal("Failed to get chain head".to_owned()))?; + .map_err(|_e| Error::Internal("Failed to get chain head".to_owned()))?; let res = tx_pool .add_to_pool(source, tx, !fluff.unwrap_or(false), &header) - .context(ErrorKind::Internal("Failed to update pool".to_owned()))?; + .map_err(|_e| Error::Internal("Failed to update pool".to_owned()))?; Ok(res) } } @@ -98,69 +113,71 @@ struct TxWrapper { /// Push new transaction to our local transaction pool. /// POST /v1/pool/push_tx -pub struct PoolPushHandler { - pub tx_pool: Weak>, +pub struct PoolPushHandler +where + B: BlockChain, + P: PoolAdapter, +{ + pub tx_pool: Weak>>, } -impl PoolPushHandler { - fn update_pool(&self, req: Request) -> Box + Send> { - let params = QueryParams::from(req.uri().query()); - - let fluff = params.get("fluff").is_some(); - let pool_arc = match w(&self.tx_pool) { - Ok(p) => p, - Err(e) => return Box::new(err(e)), - }; - - Box::new( - parse_body(req) - .and_then(move |wrapper: TxWrapper| { - util::from_hex(wrapper.tx_hex) - .map_err(|e| ErrorKind::RequestError(format!("Bad request: {}", e)).into()) - }) - .and_then(move |tx_bin| { - // All wallet api interaction explicitly uses protocol version 1 for now. - let version = ProtocolVersion(1); - - ser::deserialize(&mut &tx_bin[..], version) - .map_err(|e| ErrorKind::RequestError(format!("Bad request: {}", e)).into()) - }) - .and_then(move |tx: Transaction| { - let source = pool::TxSource::PushApi; - info!( - "Pushing transaction {} to pool (inputs: {}, outputs: {}, kernels: {})", - tx.hash(), - tx.inputs().len(), - tx.outputs().len(), - tx.kernels().len(), - ); - - // Push to tx pool. - let mut tx_pool = pool_arc.write(); - let header = tx_pool - .blockchain - .chain_head() - .context(ErrorKind::Internal("Failed to get chain head".to_owned()))?; - let res = tx_pool - .add_to_pool(source, tx, !fluff, &header) - .context(ErrorKind::Internal("Failed to update pool".to_owned()))?; - Ok(res) - }), - ) - } +async fn update_pool( + pool: Weak>>, + req: Request, +) -> Result<(), Error> +where + B: BlockChain, + P: PoolAdapter, +{ + let pool = w(&pool)?; + let params = QueryParams::from(req.uri().query()); + let fluff = params.get("fluff").is_some(); + + let wrapper: TxWrapper = parse_body(req).await?; + let tx_bin = util::from_hex(wrapper.tx_hex) + .map_err(|e| Error::RequestError(format!("Bad request: {}", e)))?; + + // All wallet api interaction explicitly uses protocol version 1 for now. 
+ let version = ProtocolVersion(1); + let tx: Transaction = ser::deserialize(&mut &tx_bin[..], version) + .map_err(|e| Error::RequestError(format!("Bad request: {}", e)))?; + + let source = pool::TxSource::PushApi; + info!( + "Pushing transaction {} to pool (inputs: {}, outputs: {}, kernels: {})", + tx.hash(), + tx.inputs().len(), + tx.outputs().len(), + tx.kernels().len(), + ); + + // Push to tx pool. + let mut tx_pool = pool.write(); + let header = tx_pool + .blockchain + .chain_head() + .map_err(|e| Error::Internal(format!("Failed to get chain head: {}", e)))?; + tx_pool + .add_to_pool(source, tx, !fluff, &header) + .map_err(|e| Error::Internal(format!("Failed to update pool: {}", e)))?; + Ok(()) } -impl Handler for PoolPushHandler { +impl Handler for PoolPushHandler +where + B: BlockChain + 'static, + P: PoolAdapter + 'static, +{ fn post(&self, req: Request) -> ResponseFuture { - Box::new( - self.update_pool(req) - .and_then(|_| ok(just_response(StatusCode::OK, ""))) - .or_else(|e| { - ok(just_response( - StatusCode::INTERNAL_SERVER_ERROR, - format!("failed: {}", e), - )) - }), - ) + let pool = self.tx_pool.clone(); + Box::pin(async move { + let res = match update_pool(pool, req).await { + Ok(_) => just_response(StatusCode::OK, ""), + Err(e) => { + just_response(StatusCode::INTERNAL_SERVER_ERROR, format!("failed: {}", e)) + } + }; + Ok(res) + }) } } diff --git a/api/src/handlers/server_api.rs b/api/src/handlers/server_api.rs index 1c724596..5c81f2eb 100644 --- a/api/src/handlers/server_api.rs +++ b/api/src/handlers/server_api.rs @@ -19,6 +19,7 @@ use crate::rest::*; use crate::router::{Handler, ResponseFuture}; use crate::types::*; use crate::web::*; + use hyper::{Body, Request, StatusCode}; use serde_json::json; use std::sync::Weak; @@ -72,7 +73,7 @@ impl StatusHandler { pub fn get_status(&self) -> Result { let head = w(&self.chain)? .head() - .map_err(|e| ErrorKind::Internal(format!("can't get head: {}", e)))?; + .map_err(|e| Error::Internal(format!("can't get head: {}", e)))?; let sync_status = w(&self.sync_state)?.status(); let (api_sync_status, api_sync_info) = sync_status_to_api(sync_status); Ok(Status::from_tip_and_peers( diff --git a/api/src/handlers/transactions_api.rs b/api/src/handlers/transactions_api.rs index 585f22fa..ae4fa69f 100644 --- a/api/src/handlers/transactions_api.rs +++ b/api/src/handlers/transactions_api.rs @@ -20,8 +20,8 @@ use crate::types::*; use crate::util; use crate::util::secp::pedersen::Commitment; use crate::web::*; -use failure::ResultExt; use hyper::{Body, Request, StatusCode}; + use std::sync::Weak; // Sum tree handler. Retrieve the roots: @@ -47,9 +47,8 @@ pub struct TxHashSetHandler { impl TxHashSetHandler { // gets roots fn get_roots(&self) -> Result { - let res = TxHashSet::from_head(w(&self.chain)?).context(ErrorKind::Internal( - "failed to read roots from txhashset".to_owned(), - ))?; + let res = TxHashSet::from_head(w(&self.chain)?) 
+ .map_err(|_e| Error::Internal("failed to read roots from txhashset".to_owned()))?; Ok(res) } @@ -85,7 +84,7 @@ impl TxHashSetHandler { let chain = w(&self.chain)?; let outputs = chain .unspent_outputs_by_pmmr_index(start_index, max, end_index) - .context(ErrorKind::NotFound)?; + .map_err(|_e| Error::NotFound)?; let out = OutputListing { last_retrieved_index: outputs.0, highest_index: outputs.1, @@ -94,7 +93,7 @@ impl TxHashSetHandler { .iter() .map(|x| OutputPrintable::from_output(x, chain.clone(), None, true, true)) .collect::, _>>() - .context(ErrorKind::Internal("chain error".to_owned()))?, + .map_err(|_e| Error::Internal("chain error".to_owned()))?, }; Ok(out) } @@ -108,7 +107,7 @@ impl TxHashSetHandler { let chain = w(&self.chain)?; let range = chain .block_height_range_to_pmmr_indices(start_block_height, end_block_height) - .context(ErrorKind::NotFound)?; + .map_err(|_e| Error::NotFound)?; let out = OutputListing { last_retrieved_index: range.0, highest_index: range.1, @@ -120,15 +119,15 @@ impl TxHashSetHandler { // return a dummy output with merkle proof for position filled out // (to avoid having to create a new type to pass around) fn get_merkle_proof_for_output(&self, id: &str) -> Result { - let c = util::from_hex(String::from(id)).context(ErrorKind::Argument(format!( - "Not a valid commitment: {}", - id - )))?; + let c = util::from_hex(String::from(id)) + .map_err(|_e| Error::Argument(format!("Not a valid commitment: {}", id)))?; let commit = Commitment::from_vec(c); let chain = w(&self.chain)?; - let output_pos = chain.get_output_pos(&commit).context(ErrorKind::NotFound)?; - let merkle_proof = chain::Chain::get_merkle_proof_for_pos(&chain, commit) - .map_err(|_| ErrorKind::NotFound)?; + let output_pos = chain + .get_output_pos(&commit) + .map_err(|_e| Error::NotFound)?; + let merkle_proof = + chain::Chain::get_merkle_proof_for_pos(&chain, commit).map_err(|_| Error::NotFound)?; Ok(OutputPrintable { output_type: OutputType::Coinbase, commit: Commitment::from_vec(vec![]), diff --git a/api/src/handlers/utils.rs b/api/src/handlers/utils.rs index 71803921..4dcaba29 100644 --- a/api/src/handlers/utils.rs +++ b/api/src/handlers/utils.rs @@ -18,7 +18,6 @@ use crate::rest::*; use crate::types::*; use crate::util; use crate::util::secp::pedersen::Commitment; -use failure::ResultExt; use std::sync::{Arc, Weak}; // All handlers use `Weak` references instead of `Arc` to avoid cycles that @@ -26,7 +25,7 @@ use std::sync::{Arc, Weak}; // boilerplate of dealing with `Weak`. pub fn w(weak: &Weak) -> Result, Error> { weak.upgrade() - .ok_or_else(|| ErrorKind::Internal("failed to upgrade weak refernce".to_owned()).into()) + .ok_or_else(|| Error::Internal("failed to upgrade weak refernce".to_owned()).into()) } /// Retrieves an output from the chain given a commit id (a tiny bit iteratively) @@ -34,10 +33,8 @@ pub fn get_output( chain: &Weak, id: &str, ) -> Result<(Output, OutputIdentifier), Error> { - let c = util::from_hex(String::from(id)).context(ErrorKind::Argument(format!( - "Not a valid commitment: {}", - id - )))?; + let c = util::from_hex(String::from(id)) + .map_err(|_e| Error::Argument(format!("Not a valid commitment: {}", id)))?; let commit = Commitment::from_vec(c); // We need the features here to be able to generate the necessary hash @@ -70,7 +67,7 @@ pub fn get_output( } } } - Err(ErrorKind::NotFound)? + Err(Error::NotFound)? 
} /// Retrieves an output from the chain given a commit id (a tiny bit iteratively) @@ -80,10 +77,8 @@ pub fn get_output_v2( include_proof: bool, include_merkle_proof: bool, ) -> Result<(OutputPrintable, OutputIdentifier), Error> { - let c = util::from_hex(String::from(id)).context(ErrorKind::Argument(format!( - "Not a valid commitment: {}", - id - )))?; + let c = util::from_hex(String::from(id)) + .map_err(|_e| Error::Argument(format!("Not a valid commitment: {}", id)))?; let commit = Commitment::from_vec(c); // We need the features here to be able to generate the necessary hash @@ -124,7 +119,7 @@ pub fn get_output_v2( } } } - Err(_) => return Err(ErrorKind::NotFound)?, + Err(_) => return Err(Error::NotFound)?, }, Err(e) => { trace!( @@ -136,5 +131,5 @@ pub fn get_output_v2( } } } - Err(ErrorKind::NotFound)? + Err(Error::NotFound)? } diff --git a/api/src/handlers/version_api.rs b/api/src/handlers/version_api.rs index 3d4b6d3e..90a7a1d7 100644 --- a/api/src/handlers/version_api.rs +++ b/api/src/handlers/version_api.rs @@ -33,7 +33,7 @@ impl VersionHandler { pub fn get_version(&self) -> Result { let head = w(&self.chain)? .head_header() - .map_err(|e| ErrorKind::Internal(format!("can't get head: {}", e)))?; + .map_err(|e| Error::Internal(format!("can't get head: {}", e)))?; Ok(Version { node_version: CRATE_VERSION.to_owned(), diff --git a/api/src/lib.rs b/api/src/lib.rs index c6612a7d..9adbe5ce 100644 --- a/api/src/lib.rs +++ b/api/src/lib.rs @@ -19,9 +19,6 @@ use epic_pool as pool; use epic_util as util; -use failure; -#[macro_use] -extern crate failure_derive; #[macro_use] extern crate lazy_static; diff --git a/api/src/owner_rpc.rs b/api/src/owner_rpc.rs index 55fb5ef5..e3fd5b5f 100644 --- a/api/src/owner_rpc.rs +++ b/api/src/owner_rpc.rs @@ -17,7 +17,7 @@ use crate::owner::Owner; use crate::p2p::types::PeerInfoDisplay; use crate::p2p::PeerData; -use crate::rest::ErrorKind; +use crate::rest::Error; use crate::types::Status; use std::net::SocketAddr; @@ -70,7 +70,7 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn get_status(&self) -> Result; + fn get_status(&self) -> Result; /** Networked version of [Owner::validate_chain](struct.Node.html#method.validate_chain). @@ -100,7 +100,7 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn validate_chain(&self) -> Result<(), ErrorKind>; + fn validate_chain(&self) -> Result<(), Error>; /** Networked version of [Owner::compact_chain](struct.Node.html#method.compact_chain). @@ -130,7 +130,7 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn compact_chain(&self) -> Result<(), ErrorKind>; + fn compact_chain(&self) -> Result<(), Error>; /** Networked version of [Owner::get_peers](struct.Node.html#method.get_peers). @@ -172,7 +172,7 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn get_peers(&self, peer_addr: Option) -> Result, ErrorKind>; + fn get_peers(&self, peer_addr: Option) -> Result, Error>; /** Networked version of [Owner::get_connected_peers](struct.Node.html#method.get_connected_peers). @@ -291,7 +291,7 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn get_connected_peers(&self) -> Result, ErrorKind>; + fn get_connected_peers(&self) -> Result, Error>; /** Networked version of [Owner::ban_peer](struct.Node.html#method.ban_peer). @@ -321,7 +321,7 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn ban_peer(&self, peer_addr: SocketAddr) -> Result<(), ErrorKind>; + fn ban_peer(&self, peer_addr: SocketAddr) -> Result<(), Error>; /** Networked version of [Owner::unban_peer](struct.Node.html#method.unban_peer). 
@@ -351,36 +351,36 @@ pub trait OwnerRpc: Sync + Send { # ); ``` */ - fn unban_peer(&self, peer_addr: SocketAddr) -> Result<(), ErrorKind>; + fn unban_peer(&self, peer_addr: SocketAddr) -> Result<(), Error>; } impl OwnerRpc for Owner { - fn get_status(&self) -> Result { - Owner::get_status(self).map_err(|e| e.kind().clone()) + fn get_status(&self) -> Result { + Owner::get_status(self) } - fn validate_chain(&self) -> Result<(), ErrorKind> { - Owner::validate_chain(self).map_err(|e| e.kind().clone()) + fn validate_chain(&self) -> Result<(), Error> { + Owner::validate_chain(self) } - fn compact_chain(&self) -> Result<(), ErrorKind> { - Owner::compact_chain(self).map_err(|e| e.kind().clone()) + fn compact_chain(&self) -> Result<(), Error> { + Owner::compact_chain(self) } - fn get_peers(&self, addr: Option) -> Result, ErrorKind> { - Owner::get_peers(self, addr).map_err(|e| e.kind().clone()) + fn get_peers(&self, addr: Option) -> Result, Error> { + Owner::get_peers(self, addr) } - fn get_connected_peers(&self) -> Result, ErrorKind> { - Owner::get_connected_peers(self).map_err(|e| e.kind().clone()) + fn get_connected_peers(&self) -> Result, Error> { + Owner::get_connected_peers(self) } - fn ban_peer(&self, addr: SocketAddr) -> Result<(), ErrorKind> { - Owner::ban_peer(self, addr).map_err(|e| e.kind().clone()) + fn ban_peer(&self, addr: SocketAddr) -> Result<(), Error> { + Owner::ban_peer(self, addr) } - fn unban_peer(&self, addr: SocketAddr) -> Result<(), ErrorKind> { - Owner::unban_peer(self, addr).map_err(|e| e.kind().clone()) + fn unban_peer(&self, addr: SocketAddr) -> Result<(), Error> { + Owner::unban_peer(self, addr) } } diff --git a/api/src/rest.rs b/api/src/rest.rs index 4062bf8e..2271a721 100644 --- a/api/src/rest.rs +++ b/api/src/rest.rs @@ -20,85 +20,46 @@ use crate::router::{Handler, HandlerObj, ResponseFuture, Router, RouterError}; use crate::web::response; -use failure::{Backtrace, Context, Fail, ResultExt}; -use futures::sync::oneshot; -use futures::Stream; -use hyper::rt::Future; -use hyper::{rt, Body, Request, Server, StatusCode}; -use rustls; +use futures::channel::oneshot; +use futures::TryStreamExt; +use hyper::server::accept; +use hyper::service::make_service_fn; +use hyper::Server; +use hyper::{Body, Request, StatusCode}; use rustls::internal::pemfile; -use std::fmt::{self, Display}; +use std::convert::Infallible; use std::fs::File; use std::net::SocketAddr; use std::sync::Arc; use std::{io, thread}; +use tokio::net::TcpListener; +use tokio::runtime::Runtime; +use tokio::stream::StreamExt; use tokio_rustls::TlsAcceptor; -use tokio_tcp; - /// Errors that can be returned by an ApiEndpoint implementation. 
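// --- Illustrative sketch, not part of the patch -----------------------------------
// The `failure`-based pair of `struct Error { inner: Context<ErrorKind> }` plus
// `enum ErrorKind` removed just below is collapsed into a single `thiserror` enum.
// A simplified sketch of that replacement pattern (the crate's real enum follows in
// this hunk; `ApiError` and the reduced `RouterError` here are illustrative only):
use thiserror::Error;

#[derive(Clone, Debug, Eq, PartialEq, Error)]
pub enum ApiError {
	// `#[error(...)]` generates the `Display` impl that `#[fail(display = ...)]` used to.
	#[error("Internal error: {0}")]
	Internal(String),
	#[error("Not found.")]
	NotFound,
	// `#[from]` replaces the hand-written `From<RouterError> for Error` conversion.
	#[error("Router error: {source}")]
	Router {
		#[from]
		source: RouterError,
	},
}

// Reduced router error, included only to make the sketch self-contained.
#[derive(Clone, Debug, Eq, PartialEq, Error)]
pub enum RouterError {
	#[error("Route not found")]
	RouteNotFound,
}
// -----------------------------------------------------------------------------------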
-#[derive(Debug)] -pub struct Error { - inner: Context, -} - -#[derive(Clone, Eq, PartialEq, Debug, Fail, Serialize, Deserialize)] -pub enum ErrorKind { - #[fail(display = "Internal error: {}", _0)] +#[derive(Clone, Eq, PartialEq, Debug, thiserror::Error, Serialize, Deserialize)] +pub enum Error { + #[error("Internal error: {0}")] Internal(String), - #[fail(display = "Bad arguments: {}", _0)] + #[error("Bad arguments: {0}")] Argument(String), - #[fail(display = "Not found.")] + #[error("Not found.")] NotFound, - #[fail(display = "Request error: {}", _0)] + #[error("Request error: {0}")] RequestError(String), - #[fail(display = "ResponseError error: {}", _0)] + #[error("ResponseError error: {0}")] ResponseError(String), - #[fail(display = "Router error: {}", _0)] - Router(RouterError), -} - -impl Fail for Error { - fn cause(&self) -> Option<&dyn Fail> { - self.inner.cause() - } - - fn backtrace(&self) -> Option<&Backtrace> { - self.inner.backtrace() - } -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -impl Error { - pub fn kind(&self) -> &ErrorKind { - self.inner.get_context() - } -} - -impl From for Error { - fn from(kind: ErrorKind) -> Error { - Error { - inner: Context::new(kind), - } - } + #[error("Router error: {source}")] + Router { + #[from] + source: RouterError, + }, } -impl From> for Error { - fn from(inner: Context) -> Error { - Error { inner: inner } - } -} - -impl From for Error { - fn from(error: RouterError) -> Error { - Error { - inner: Context::new(ErrorKind::Router(error)), - } +impl From for Error { + fn from(error: crate::chain::Error) -> Error { + Error::Internal(error.to_string()) } } @@ -118,29 +79,26 @@ impl TLSConfig { } fn load_certs(&self) -> Result, Error> { - let certfile = File::open(&self.certificate).context(ErrorKind::Internal(format!( - "failed to open file {}", - self.certificate - )))?; + let certfile = File::open(&self.certificate).map_err(|e| { + Error::Internal(format!("failed to open file {} {}", self.certificate, e)) + })?; let mut reader = io::BufReader::new(certfile); pemfile::certs(&mut reader) - .map_err(|_| ErrorKind::Internal("failed to load certificate".to_string()).into()) + .map_err(|_| Error::Internal("failed to load certificate".to_string())) } fn load_private_key(&self) -> Result { - let keyfile = File::open(&self.private_key).context(ErrorKind::Internal(format!( - "failed to open file {}", - self.private_key - )))?; + let keyfile = File::open(&self.private_key) + .map_err(|e| Error::Internal(format!("failed to open private key file {}", e)))?; + let mut reader = io::BufReader::new(keyfile); let keys = pemfile::pkcs8_private_keys(&mut reader) - .map_err(|_| ErrorKind::Internal("failed to load private key".to_string()))?; + .map_err(|_| Error::Internal("failed to load private key".to_string()))?; + if keys.len() != 1 { - return Err(ErrorKind::Internal( - "expected a single private key".to_string(), - ))?; + return Err(Error::Internal("expected a single private key".to_string()))?; } Ok(keys[0].clone()) } @@ -150,9 +108,8 @@ impl TLSConfig { let key = self.load_private_key()?; let mut cfg = rustls::ServerConfig::new(rustls::NoClientAuth::new()); cfg.set_single_cert(certs, key) - .context(ErrorKind::Internal( - "set single certificate failed".to_string(), - ))?; + .map_err(|e| Error::Internal(format!("set single certificate failed {}", e)))?; + Ok(Arc::new(cfg)) } } @@ -178,10 +135,11 @@ impl ApiServer { addr: SocketAddr, router: Router, conf: Option, + 
api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>), ) -> Result, Error> { match conf { - Some(conf) => self.start_tls(addr, router, conf), - None => self.start_no_tls(addr, router), + Some(conf) => self.start_tls(addr, router, conf, api_chan), + None => self.start_no_tls(addr, router, api_chan), } } @@ -190,26 +148,45 @@ impl ApiServer { &mut self, addr: SocketAddr, router: Router, + api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>), ) -> Result, Error> { if self.shutdown_sender.is_some() { - return Err(ErrorKind::Internal( + return Err(Error::Internal( "Can't start HTTP API server, it's running already".to_string(), - ))?; + )); } - let (tx, _rx) = oneshot::channel::<()>(); + let rx = &mut api_chan.1; + let tx = &mut api_chan.0; + + // Jones's trick to update memory + let m = oneshot::channel::<()>(); + let tx = std::mem::replace(tx, m.0); self.shutdown_sender = Some(tx); + thread::Builder::new() .name("apis".to_string()) .spawn(move || { - let server = Server::bind(&addr) - .serve(router) - // TODO graceful shutdown is unstable, investigate - //.with_graceful_shutdown(rx) - .map_err(|e| eprintln!("HTTP API server error: {}", e)); + let server = async move { + let server = Server::bind(&addr) + .serve(make_service_fn(move |_| { + let router = router.clone(); + async move { Ok::<_, Infallible>(router) } + })) + .with_graceful_shutdown(async { + rx.await.ok(); + }); - rt::run(server); + server.await + }; + + let mut rt = Runtime::new() + .map_err(|e| eprintln!("HTTP API server error: {}", e)) + .unwrap(); + if let Err(e) = rt.block_on(server) { + eprintln!("HTTP API server error: {}", e) + } }) - .map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into()) + .map_err(|_| Error::Internal("failed to spawn API thread".to_string())) } /// Starts the TLS ApiServer at the provided address. 
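// --- Illustrative sketch, not part of the patch -----------------------------------
// The `start_no_tls` rewrite above (and `start_tls` below) boils down to this
// hyper 0.13 / tokio 0.2 pattern: run the server on a dedicated runtime inside the
// API thread and tie shutdown to a futures `oneshot` receiver, so `ApiServer::stop()`
// can fire the sender from outside. The patch threads the channel through the
// `&'static mut` `api_chan` pair; this sketch simplifies that by taking the receiver
// directly, and `run_api_server` is a hypothetical free function.
use futures::channel::oneshot;
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request, Response, Server};
use std::convert::Infallible;
use std::net::SocketAddr;

fn run_api_server(addr: SocketAddr, rx: oneshot::Receiver<()>) -> std::thread::JoinHandle<()> {
	std::thread::spawn(move || {
		let mut rt = tokio::runtime::Runtime::new().expect("failed to build tokio runtime");
		rt.block_on(async move {
			// Placeholder service; the real code hands hyper a cloned `Router` instead.
			let service = make_service_fn(|_conn| async {
				Ok::<_, Infallible>(service_fn(|_req: Request<Body>| async {
					Ok::<_, Infallible>(Response::new(Body::from("ok")))
				}))
			});
			let server = Server::bind(&addr)
				.serve(service)
				// The server future resolves once the oneshot sender fires or is dropped.
				.with_graceful_shutdown(async {
					rx.await.ok();
				});
			if let Err(e) = server.await {
				eprintln!("HTTP API server error: {}", e);
			}
		});
	})
}
// -----------------------------------------------------------------------------------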
@@ -219,39 +196,55 @@ impl ApiServer { addr: SocketAddr, router: Router, conf: TLSConfig, + api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>), ) -> Result, Error> { if self.shutdown_sender.is_some() { - return Err(ErrorKind::Internal( + return Err(Error::Internal( "Can't start HTTPS API server, it's running already".to_string(), - ))?; + )); } - let tls_conf = conf.build_server_config()?; - let acceptor = TlsAcceptor::from(tls_conf); + let rx = &mut api_chan.1; + let tx = &mut api_chan.0; + + // Jones's trick to update memory + let m = oneshot::channel::<()>(); + let tx = std::mem::replace(tx, m.0); + self.shutdown_sender = Some(tx); + + let acceptor = TlsAcceptor::from(conf.build_server_config()?); thread::Builder::new() .name("apis".to_string()) .spawn(move || { - let listener = tokio_tcp::TcpListener::bind(&addr).expect("failed to bind"); - let acceptor = acceptor.clone(); - let tls = listener - .incoming() - .and_then(move |s| acceptor.accept(s)) - .then(|r| match r { - Ok(x) => Ok::<_, io::Error>(Some(x)), - Err(e) => { - error!("accept_async failed: {}", e); - Ok(None) - } - }) - .filter_map(|x| x); - let server = Server::builder(tls) - .serve(router) - .map_err(|e| eprintln!("HTTP API server error: {}", e)); - - rt::run(server); + let server = async move { + let mut listener = TcpListener::bind(&addr).await.expect("failed to bind"); + + let listener = listener + .incoming() + .and_then(move |s| acceptor.accept(s)) + .filter(|r| r.is_ok()); + + let server = Server::builder(accept::from_stream(listener)) + .serve(make_service_fn(move |_| { + let router = router.clone(); + async move { Ok::<_, Infallible>(router) } + })) + .with_graceful_shutdown(async { + rx.await.ok(); + }); + + server.await + }; + + let mut rt = Runtime::new() + .map_err(|e| eprintln!("HTTP API server error: {}", e)) + .unwrap(); + if let Err(e) = rt.block_on(server) { + eprintln!("HTTP API server error: {}", e) + } }) - .map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into()) + .map_err(|_| Error::Internal("failed to spawn API thread".to_string())) } /// Stops the API server, it panics in case of error @@ -278,6 +271,7 @@ impl Handler for LoggingMiddleware { mut handlers: Box>, ) -> ResponseFuture { debug!("REST call: {} {}", req.method(), req.uri().path()); + match handlers.next() { Some(handler) => handler.call(req, handlers), None => response(StatusCode::INTERNAL_SERVER_ERROR, "no handler found"), diff --git a/api/src/router.rs b/api/src/router.rs index 9aa1abdd..5a3c3aab 100644 --- a/api/src/router.rs +++ b/api/src/router.rs @@ -12,21 +12,58 @@ // See the License for the specific language governing permissions and // limitations under the License. -use futures::future; -use hyper; -use hyper::rt::Future; -use hyper::service::{NewService, Service}; -use hyper::{Body, Method, Request, Response, StatusCode}; +//use hyper::rt::Future; +//use hyper::service::{NewService, Service}; +use futures::future::{self, Future}; + +use hyper::service::Service; +use hyper::{Method, StatusCode}; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; +use std::pin::Pin; use std::sync::Arc; +use std::task::{Context, Poll}; + +use hyper::{Body, Request, Response}; + +const MAX_CHILDREN: usize = 16; lazy_static! 
{ static ref WILDCARD_HASH: u64 = calculate_hash(&"*"); static ref WILDCARD_STOP_HASH: u64 = calculate_hash(&"**"); } -pub type ResponseFuture = Box, Error = hyper::Error> + Send>; +pub type HandlerObj = Arc; + +pub type ResponseFuture = + Pin, hyper::Error>> + Send>>; + +#[derive(Clone, thiserror::Error, Eq, Debug, PartialEq, Serialize, Deserialize)] +pub enum RouterError { + #[error("Route already exists")] + RouteAlreadyExists, + #[error("Route not found")] + RouteNotFound, + #[error("Value not found")] + NoValue, +} + +#[derive(Clone)] +pub struct Router { + nodes: Vec, +} + +#[derive(Debug, Clone, Copy)] +struct NodeId(usize); + +#[derive(Clone)] +pub struct Node { + key: u64, + value: Option, + children: [NodeId; MAX_CHILDREN], + children_count: usize, + mws: Option>, +} pub trait Handler { fn get(&self, _req: Request) -> ResponseFuture { @@ -70,52 +107,21 @@ pub trait Handler { req: Request, mut _handlers: Box>, ) -> ResponseFuture { - match req.method() { - &Method::GET => self.get(req), - &Method::POST => self.post(req), - &Method::PUT => self.put(req), - &Method::DELETE => self.delete(req), - &Method::PATCH => self.patch(req), - &Method::OPTIONS => self.options(req), - &Method::CONNECT => self.connect(req), - &Method::TRACE => self.trace(req), - &Method::HEAD => self.head(req), + match *req.method() { + Method::GET => self.get(req), + Method::POST => self.post(req), + Method::PUT => self.put(req), + Method::DELETE => self.delete(req), + Method::PATCH => self.patch(req), + Method::OPTIONS => self.options(req), + Method::CONNECT => self.connect(req), + Method::TRACE => self.trace(req), + Method::HEAD => self.head(req), _ => not_found(), } } } -#[derive(Clone, Fail, Eq, Debug, PartialEq, Serialize, Deserialize)] -pub enum RouterError { - #[fail(display = "Route already exists")] - RouteAlreadyExists, - #[fail(display = "Route not found")] - RouteNotFound, - #[fail(display = "Value not found")] - NoValue, -} - -#[derive(Clone)] -pub struct Router { - nodes: Vec, -} - -#[derive(Debug, Clone, Copy)] -struct NodeId(usize); - -const MAX_CHILDREN: usize = 16; - -pub type HandlerObj = Arc; - -#[derive(Clone)] -pub struct Node { - key: u64, - value: Option, - children: [NodeId; MAX_CHILDREN], - children_count: usize, - mws: Option>, -} - impl Router { pub fn new() -> Router { let root = Node::new(calculate_hash(&""), None); @@ -203,13 +209,16 @@ impl Router { } } -impl Service for Router { - type ReqBody = Body; - type ResBody = Body; +impl Service> for Router { + type Response = hyper::Response; type Error = hyper::Error; type Future = ResponseFuture; - fn call(&mut self, req: Request) -> Self::Future { + fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, req: hyper::Request) -> Self::Future { match self.get(req.uri().path()) { Err(_) => not_found(), Ok(mut handlers) => match handlers.next() { @@ -220,18 +229,6 @@ impl Service for Router { } } -impl NewService for Router { - type ReqBody = Body; - type ResBody = Body; - type Error = hyper::Error; - type InitError = hyper::Error; - type Service = Router; - type Future = Box + Send>; - fn new_service(&self) -> Self::Future { - Box::new(future::ok(self.clone())) - } -} - impl Node { fn new(key: u64, value: Option) -> Node { Node { @@ -276,7 +273,7 @@ impl Node { pub fn not_found() -> ResponseFuture { let mut response = Response::new(Body::empty()); *response.status_mut() = StatusCode::NOT_FOUND; - Box::new(future::ok(response)) + Box::pin(future::ok(response)) } fn calculate_hash(t: &T) 
-> u64 { @@ -305,19 +302,22 @@ fn collect_node_middleware(handlers: &mut Vec, node: &Node) { mod tests { use super::*; - use tokio::prelude::future::ok; - use tokio_core::reactor::Core; + use futures::executor::block_on; + //use tokio::prelude::future::ok; + //use tokio_core::reactor::Core; struct HandlerImpl(u16); impl Handler for HandlerImpl { fn get(&self, _req: Request) -> ResponseFuture { - Box::new(future::ok( - Response::builder() - .status(self.0) + let code = self.0; + Box::pin(async move { + let res = Response::builder() + .status(code) .body(Body::default()) - .unwrap(), - )) + .unwrap(); + Ok(res) + }) } } @@ -358,15 +358,18 @@ mod tests { .unwrap(); let call_handler = |url| { - let mut event_loop = Core::new().unwrap(); - let task = routes - .get(url) - .unwrap() - .next() - .unwrap() - .get(Request::new(Body::default())) - .and_then(|resp| ok(resp.status().as_u16())); - event_loop.run(task).unwrap() + let task = async { + let resp = routes + .get(url) + .unwrap() + .next() + .unwrap() + .get(Request::new(Body::default())) + .await + .unwrap(); + resp.status().as_u16() + }; + block_on(task) }; assert_eq!(call_handler("/v1/users"), 101); diff --git a/api/src/types.rs b/api/src/types.rs index 8bb9188b..803cb9fe 100644 --- a/api/src/types.rs +++ b/api/src/types.rs @@ -35,7 +35,7 @@ macro_rules! no_dup { ($field:ident) => { if $field.is_some() { return Err(serde::de::Error::duplicate_field("$field")); - } + } }; } @@ -99,7 +99,7 @@ impl Status { Status { protocol_version: ser::ProtocolVersion::local().into(), user_agent: p2p::msg::USER_AGENT.to_string(), - connections: connections, + connections, tip: Tip::from_tip(current_tip), sync_status, sync_info, @@ -197,8 +197,8 @@ impl Output { commit: PrintableCommitment { commit: commit.clone(), }, - height: height, - mmr_index: mmr_index, + height, + mmr_index, } } } @@ -475,10 +475,10 @@ impl<'de> serde::de::Deserialize<'de> for OutputPrintable { output_type: output_type.unwrap(), commit: commit.unwrap(), spent: spent.unwrap(), - proof: proof, + proof, proof_hash: proof_hash.unwrap(), block_height: block_height.unwrap(), - merkle_proof: merkle_proof, + merkle_proof, mmr_index: mmr_index.unwrap(), }) } @@ -682,9 +682,9 @@ impl BlockPrintable { .collect(); Ok(BlockPrintable { header: BlockHeaderPrintable::from_header(&block.header), - inputs: inputs, - outputs: outputs, - kernels: kernels, + inputs, + outputs, + kernels, }) } } diff --git a/api/src/web.rs b/api/src/web.rs index 69d3c734..1e655e37 100755 --- a/api/src/web.rs +++ b/api/src/web.rs @@ -1,30 +1,26 @@ use crate::rest::*; use crate::router::ResponseFuture; -use futures::future::{err, ok}; -use futures::{Future, Stream}; +use bytes::buf::BufExt; +use futures::future::ok; use hyper::{Body, Request, Response, StatusCode}; use serde::{Deserialize, Serialize}; use serde_json; use std::collections::HashMap; use std::fmt::Debug; use url::form_urlencoded; - /// Parse request body -pub fn parse_body(req: Request) -> Box + Send> +pub async fn parse_body(req: Request) -> Result where for<'de> T: Deserialize<'de> + Send + 'static, { - Box::new( - req.into_body() - .concat2() - .map_err(|e| ErrorKind::RequestError(format!("Failed to read request: {}", e)).into()) - .and_then(|body| match serde_json::from_reader(&body.to_vec()[..]) { - Ok(obj) => ok(obj), - Err(e) => { - err(ErrorKind::RequestError(format!("Invalid request body: {}", e)).into()) - } - }), - ) + // Aggregate the body... 
+ let whole_body = hyper::body::aggregate(req) + .await + .map_err(|e| Error::RequestError(format!("Failed to read request: {}", e)))?; + + // Decode as JSON... + serde_json::from_reader(whole_body.reader()) + .map_err(|e| Error::RequestError(format!("Invalid request body: {}", e))) } /// Convert Result to ResponseFuture @@ -34,16 +30,14 @@ where { match res { Ok(s) => json_response_pretty(&s), - Err(e) => match e.kind() { - ErrorKind::Argument(msg) => response(StatusCode::BAD_REQUEST, msg.clone()), - ErrorKind::RequestError(msg) => response(StatusCode::BAD_REQUEST, msg.clone()), - ErrorKind::NotFound => response(StatusCode::NOT_FOUND, ""), - ErrorKind::Internal(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()), - ErrorKind::ResponseError(msg) => { - response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()) - } + Err(e) => match e { + Error::Argument(msg) => response(StatusCode::BAD_REQUEST, msg.clone()), + Error::RequestError(msg) => response(StatusCode::BAD_REQUEST, msg.clone()), + Error::NotFound => response(StatusCode::NOT_FOUND, ""), + Error::Internal(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()), + Error::ResponseError(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()), // place holder - ErrorKind::Router(_) => response(StatusCode::INTERNAL_SERVER_ERROR, ""), + Error::Router { .. } => response(StatusCode::INTERNAL_SERVER_ERROR, ""), }, } } @@ -83,7 +77,7 @@ pub fn just_response + Debug>(status: StatusCode, text: T) -> Resp /// Text response as future pub fn response + Debug>(status: StatusCode, text: T) -> ResponseFuture { - Box::new(ok(just_response(status, text))) + Box::pin(ok(just_response(status, text))) } pub struct QueryParams { @@ -152,7 +146,7 @@ macro_rules! must_get_query( ($req: expr) =>( match $req.uri().query() { Some(q) => q, - None => return Err(ErrorKind::RequestError("no query string".to_owned()))?, + None => return Err(Error::RequestError("no query string".to_owned()))?, } )); @@ -163,7 +157,7 @@ macro_rules! 
parse_param( None => $default, Some(val) => match val.parse() { Ok(val) => val, - Err(_) => return Err(ErrorKind::RequestError(format!("invalid value of parameter {}", $name)))?, + Err(_) => return Err(Error::RequestError(format!("invalid value of parameter {}", $name)))?, } } )); diff --git a/api/tests/rest.rs b/api/tests/rest.rs index 702d5390..b99fe7d4 100755 --- a/api/tests/rest.rs +++ b/api/tests/rest.rs @@ -2,9 +2,10 @@ use epic_api as api; use epic_util as util; use crate::api::*; +use futures::channel::oneshot; use hyper::{Body, Request, StatusCode}; use std::net::SocketAddr; -use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::{thread, time}; @@ -27,7 +28,7 @@ pub struct CounterMiddleware { impl CounterMiddleware { fn new() -> CounterMiddleware { CounterMiddleware { - counter: ATOMIC_USIZE_INIT, + counter: AtomicUsize::new(0), } } @@ -57,7 +58,7 @@ fn build_router() -> Router { router .add_route("/v1/*", Arc::new(index_handler)) .expect("add_route failed") - .add_middleware(Arc::new(LoggingMiddleware {})); + .add_middleware(Arc::new(epic_api::rest::LoggingMiddleware {})); router } @@ -71,7 +72,9 @@ fn test_start_api() { router.add_middleware(counter.clone()); let server_addr = "127.0.0.1:14434"; let addr: SocketAddr = server_addr.parse().expect("unable to parse server address"); - assert!(server.start(addr, router, None).is_ok()); + let api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>) = + Box::leak(Box::new(oneshot::channel::<()>())); + assert!(server.start(addr, router, None, api_chan).is_ok()); let url = format!("http://{}/v1/", server_addr); let index = request_with_retry(url.as_str()).unwrap(); assert_eq!(index.len(), 2); @@ -96,7 +99,9 @@ fn test_start_api_tls() { let router = build_router(); let server_addr = "0.0.0.0:14444"; let addr: SocketAddr = server_addr.parse().expect("unable to parse server address"); - assert!(server.start(addr, router, Some(tls_conf)).is_ok()); + let api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>) = + Box::leak(Box::new(oneshot::channel::<()>())); + assert!(server.start(addr, router, Some(tls_conf), api_chan).is_ok()); let index = request_with_retry("https://yourdomain.com:14444/v1/").unwrap(); assert_eq!(index.len(), 2); assert!(!server.stop()); diff --git a/pool/src/pool.rs b/pool/src/pool.rs index e3dd7859..8a3b062d 100644 --- a/pool/src/pool.rs +++ b/pool/src/pool.rs @@ -29,16 +29,22 @@ use std::sync::Arc; //use self::util::RwLock; //use epic_util as util; -pub struct Pool { +pub struct Pool +where + B: BlockChain, +{ /// Entries in the pool (tx + info + timer) in simple insertion order. pub entries: Vec, /// The blockchain - pub blockchain: Arc, + pub blockchain: Arc, pub name: String, } -impl Pool { - pub fn new(chain: Arc, name: String) -> Pool { +impl Pool +where + B: BlockChain, +{ + pub fn new(chain: Arc, name: String) -> Self { Pool { entries: vec![], blockchain: chain, diff --git a/pool/src/transaction_pool.rs b/pool/src/transaction_pool.rs index 4317869f..9b1f6bd6 100644 --- a/pool/src/transaction_pool.rs +++ b/pool/src/transaction_pool.rs @@ -30,35 +30,36 @@ use std::collections::VecDeque; use std::sync::Arc; /// Transaction pool implementation. -pub struct TransactionPool { +pub struct TransactionPool +where + B: BlockChain, + P: PoolAdapter, +{ /// Pool Config pub config: PoolConfig, /// Our transaction pool. - pub txpool: Pool, + pub txpool: Pool, /// Our Dandelion "stempool". 
- pub stempool: Pool, + pub stempool: Pool, /// Cache of previous txs in case of a re-org. pub reorg_cache: Arc>>, /// The blockchain - pub blockchain: Arc, + pub blockchain: Arc, /// The pool adapter - pub adapter: Arc, + pub adapter: Arc
<P>
, } -impl TransactionPool { +impl TransactionPool +where + B: BlockChain, + P: PoolAdapter, +{ /// Create a new transaction pool - pub fn new( - config: PoolConfig, - chain: Arc, - adapter: Arc, - ) -> TransactionPool { + pub fn new(config: PoolConfig, chain: Arc, adapter: Arc
<P>
) -> Self { TransactionPool { config, txpool: Pool::new(chain.clone(), "txpool".to_string()), - stempool: Pool::new( - chain.clone(), - "stempool".to_string(), - ), + stempool: Pool::new(chain.clone(), "stempool".to_string()), reorg_cache: Arc::new(RwLock::new(VecDeque::new())), blockchain: chain, adapter, diff --git a/servers/Cargo.toml b/servers/Cargo.toml index fd08062f..2cd56d6f 100755 --- a/servers/Cargo.toml +++ b/servers/Cargo.toml @@ -11,10 +11,10 @@ edition = "2018" [dependencies] hyper = "0.13" -hyper-rustls = "0.19" +hyper-rustls = "0.20" fs2 = "0.4" futures = "0.3" -http = "0.1.5" +http = "0.2" itertools = "0.7" lmdb-zero = "0.4.4" rand = "0.6" @@ -30,7 +30,6 @@ clokwerk = "0.2.1" trust-dns-resolver = "0.11.1" walkdir = "2.2.9" -async-trait = "0.1.73" epic_api = { path = "../api", version = "3.4.0" } epic_chain = { path = "../chain", version = "3.4.0" } diff --git a/servers/src/common/adapters.rs b/servers/src/common/adapters.rs index 2199034b..a6f81ade 100644 --- a/servers/src/common/adapters.rs +++ b/servers/src/common/adapters.rs @@ -35,7 +35,7 @@ use crate::core::pow::Difficulty; use crate::core::{core, global}; use crate::p2p; use crate::p2p::types::PeerInfo; -use crate::pool; +use crate::pool::{self, BlockChain, PoolAdapter}; use crate::util::OneTime; use chrono::prelude::*; use chrono::Duration; @@ -44,16 +44,24 @@ use rand::prelude::*; /// Implementation of the NetAdapter for the . Gets notified when new /// blocks and transactions are received and forwards to the chain and pool /// implementations. -pub struct NetToChainAdapter { +pub struct NetToChainAdapter +where + B: BlockChain, + P: PoolAdapter, +{ sync_state: Arc, chain: Weak, - tx_pool: Arc>, + tx_pool: Arc>>, peers: OneTime>, config: ServerConfig, hooks: Vec>, } -impl p2p::ChainAdapter for NetToChainAdapter { +impl p2p::ChainAdapter for NetToChainAdapter +where + B: BlockChain, + P: PoolAdapter, +{ fn total_difficulty(&self) -> Result { Ok(self.chain().head()?.total_difficulty) } @@ -532,15 +540,19 @@ impl p2p::ChainAdapter for NetToChainAdapter { } } -impl NetToChainAdapter { +impl NetToChainAdapter +where + B: BlockChain, + P: PoolAdapter, +{ /// Construct a new NetToChainAdapter instance pub fn new( sync_state: Arc, chain: Arc, - tx_pool: Arc>, + tx_pool: Arc>>, config: ServerConfig, hooks: Vec>, - ) -> NetToChainAdapter { + ) -> Self { NetToChainAdapter { sync_state, chain: Arc::downgrade(&chain), @@ -765,13 +777,21 @@ impl NetToChainAdapter { /// Implementation of the ChainAdapter for the network. Gets notified when the /// accepted a new block, asking the pool to update its state and /// the network to broadcast the block -pub struct ChainToPoolAndNetAdapter { - tx_pool: Arc>, +pub struct ChainToPoolAndNetAdapter +where + B: BlockChain, + P: PoolAdapter, +{ + tx_pool: Arc>>, peers: OneTime>, hooks: Vec>, } -impl ChainAdapter for ChainToPoolAndNetAdapter { +impl ChainAdapter for ChainToPoolAndNetAdapter +where + B: BlockChain, + P: PoolAdapter, +{ fn block_accepted(&self, b: &core::Block, status: BlockStatus, opts: Options) { // not broadcasting blocks received through sync if !opts.contains(chain::Options::SYNC) { @@ -815,12 +835,16 @@ impl ChainAdapter for ChainToPoolAndNetAdapter { } } -impl ChainToPoolAndNetAdapter { +impl ChainToPoolAndNetAdapter +where + B: BlockChain, + P: PoolAdapter, +{ /// Construct a ChainToPoolAndNetAdapter instance. 
 	pub fn new(
-		tx_pool: Arc>,
+		tx_pool: Arc>>,
 		hooks: Vec>,
-	) -> ChainToPoolAndNetAdapter {
+	) -> Self {
 		ChainToPoolAndNetAdapter {
 			tx_pool,
 			peers: OneTime::new(),
diff --git a/servers/src/common/hooks.rs b/servers/src/common/hooks.rs
index b73cea73..5de3765c 100644
--- a/servers/src/common/hooks.rs
+++ b/servers/src/common/hooks.rs
@@ -25,6 +25,7 @@ use crate::core::core;
 use crate::core::core::hash::Hashed;
 use crate::p2p::types::PeerAddr;
+use futures::TryFutureExt;
 use hyper::client::HttpConnector;
 use hyper::header::HeaderValue;
 use hyper::Client;
@@ -33,8 +34,7 @@ use hyper_rustls::HttpsConnector;
 use serde::Serialize;
 use serde_json::{json, to_string};
 use std::time::Duration;
-
-use async_trait::async_trait;
+use tokio::runtime::{Builder, Runtime};
 
 /// Returns the list of event hooks that will be initialized for network events
 pub fn init_net_hooks(config: &ServerConfig) -> Vec> {
@@ -59,32 +59,29 @@ pub fn init_chain_hooks(config: &ServerConfig) -> Vec {
 		info!(
@@ -181,6 +177,8 @@ struct WebHook {
 	block_accepted_url: Option,
 	/// The hyper client to be used for all requests
 	client: Client>,
+	/// The tokio event loop
+	runtime: Runtime,
 }
 
 impl WebHook {
@@ -212,6 +210,12 @@ impl WebHook {
 			header_received_url,
 			block_accepted_url,
 			client,
+			runtime: Builder::new()
+				.threaded_scheduler()
+				.enable_all()
+				.core_threads(nthreads as usize)
+				.build()
+				.unwrap(),
 		}
 	}
 
@@ -227,7 +231,7 @@ impl WebHook {
 		)
 	}
 
-	async fn post(&self, url: hyper::Uri, data: String) {
+	fn post(&self, url: hyper::Uri, data: String) {
 		let mut req = Request::new(Body::from(data));
 		*req.method_mut() = Method::POST;
 		*req.uri_mut() = url.clone();
@@ -235,20 +239,15 @@ impl WebHook {
 			hyper::header::CONTENT_TYPE,
 			HeaderValue::from_static("application/json"),
 		);
-		info!("####### servers common hooks post req: {:?}", req);
-		let _future = self
-			.client
-			.request(req).await
-			/*.map(|_res| {})
-			.map_err(move |_res| {
-				warn!("Error sending POST request to {}", url);
-			})*/;
-
-		//future.
-		//let handle = self.runtime.executor();
-		//handle.spawn(future);
+
+		let future = self.client.request(req).map_err(move |_res| {
+			warn!("Error sending POST request to {}", url);
+		});
+
+		self.runtime.spawn(future);
 	}
 
-	async fn make_request(&self, payload: &T, uri: &Option) -> bool {
+
+	fn make_request(&self, payload: &T, uri: &Option) -> bool {
 		if let Some(url) = uri {
 			let payload = match to_string(payload) {
 				Ok(serialized) => serialized,
@@ -256,15 +255,14 @@ impl WebHook {
 					return false; // print error message
 				}
 			};
-			self.post(url.clone(), payload).await;
+			self.post(url.clone(), payload);
 		}
 		true
 	}
 }
 
-#[async_trait]
 impl ChainEvents for WebHook {
-	async fn on_block_accepted(&self, block: &core::Block, status: &BlockStatus) {
+	fn on_block_accepted(&self, block: &core::Block, status: &BlockStatus) {
 		let status_str = match status {
 			BlockStatus::Reorg(_) => "reorg",
 			BlockStatus::Fork => "fork",
@@ -288,7 +286,7 @@ impl ChainEvents for WebHook {
 			})
 		};
 
-		if !self.make_request(&payload, &self.block_accepted_url).await {
+		if !self.make_request(&payload, &self.block_accepted_url) {
 			error!(
 				"Failed to serialize block {} at height {}",
 				block.hash(),
@@ -298,27 +296,26 @@ impl ChainEvents for WebHook {
 	}
 }
 
-#[async_trait]
 impl NetEvents for WebHook {
 	/// Triggers when a new transaction arrives
-	async fn on_transaction_received(&self, tx: &core::Transaction) {
+	fn on_transaction_received(&self, tx: &core::Transaction) {
 		let payload = json!({
 			"hash": tx.hash().to_hex(),
 			"data": tx
 		});
 
-		if !self.make_request(&payload, &self.tx_received_url).await {
+		if !self.make_request(&payload, &self.tx_received_url) {
 			error!("Failed to serialize transaction {}", tx.hash());
 		}
 	}
 
 	/// Triggers when a new block arrives
-	async fn on_block_received(&self, block: &core::Block, addr: &PeerAddr) {
+	fn on_block_received(&self, block: &core::Block, addr: &PeerAddr) {
 		let payload = json!({
 			"hash": block.header.hash().to_hex(),
 			"peer": addr,
 			"data": block
 		});
 
-		if !self.make_request(&payload, &self.block_received_url).await {
+		if !self.make_request(&payload, &self.block_received_url) {
 			error!(
 				"Failed to serialize block {} at height {}",
 				block.hash().to_hex(),
@@ -328,13 +325,13 @@ impl NetEvents for WebHook {
 	}
 
 	/// Triggers when a new block header arrives
-	async fn on_header_received(&self, header: &core::BlockHeader, addr: &PeerAddr) {
+	fn on_header_received(&self, header: &core::BlockHeader, addr: &PeerAddr) {
 		let payload = json!({
 			"hash": header.hash().to_hex(),
 			"peer": addr,
 			"data": header
 		});
 
-		if !self.make_request(&payload, &self.header_received_url).await {
+		if !self.make_request(&payload, &self.header_received_url) {
 			error!(
 				"Failed to serialize header {} at height {}",
 				header.hash(),
diff --git a/servers/src/epic/dandelion_monitor.rs b/servers/src/epic/dandelion_monitor.rs
index 80a438c3..c44f0b2e 100644
--- a/servers/src/epic/dandelion_monitor.rs
+++ b/servers/src/epic/dandelion_monitor.rs
@@ -12,18 +12,18 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
+use crate::common::adapters::DandelionAdapter;
+use crate::core::core::hash::Hashed;
+use crate::core::core::transaction;
+use crate::pool::{BlockChain, DandelionConfig, Pool, PoolEntry, PoolError, TxSource};
+use crate::util::StopState;
+
+use crate::ServerTxPool;
 use chrono::prelude::Utc;
 use rand::{thread_rng, Rng};
 use std::sync::Arc;
 use std::thread;
 use std::time::{Duration, Instant};
-
-use crate::common::adapters::DandelionAdapter;
-use crate::core::core::hash::Hashed;
-use crate::core::core::transaction;
-use crate::pool::{DandelionConfig, Pool, PoolEntry, PoolError, TransactionPool, TxSource};
-use crate::util::{RwLock, StopState};
-
 /// A process to monitor transactions in the stempool.
 /// With Dandelion, transaction can be broadcasted in stem or fluff phase.
 /// When sent in stem phase, the transaction is relayed to only node: the
@@ -34,7 +34,7 @@ use crate::util::{RwLock, StopState};
 /// sending only to the peer relay.
 pub fn monitor_transactions(
 	dandelion_config: DandelionConfig,
-	tx_pool: Arc>,
+	tx_pool: ServerTxPool,
 	adapter: Arc,
 	stop_state: Arc,
 ) -> std::io::Result> {
@@ -84,7 +84,10 @@ pub fn monitor_transactions(
 // Query the pool for transactions older than the cutoff.
 // Used for both periodic fluffing and handling expired embargo timer.
-fn select_txs_cutoff(pool: &Pool, cutoff_secs: u16) -> Vec {
+fn select_txs_cutoff<B>(pool: &Pool<B>, cutoff_secs: u16) -> Vec
+where
+	B: BlockChain,
+{
 	let cutoff = Utc::now().timestamp() - cutoff_secs as i64;
 	pool.entries
 		.iter()
@@ -95,7 +98,7 @@ fn select_txs_cutoff(pool: &Pool, cutoff_secs: u16) -> Vec {
 fn process_fluff_phase(
 	dandelion_config: &DandelionConfig,
-	tx_pool: &Arc>,
+	tx_pool: &ServerTxPool,
 	adapter: &Arc,
 ) -> Result<(), PoolError> {
 	// Take a write lock on the txpool for the duration of this processing.
@@ -143,7 +146,7 @@ fn process_fluff_phase(
 fn process_expired_entries(
 	dandelion_config: &DandelionConfig,
-	tx_pool: &Arc>,
+	tx_pool: &ServerTxPool,
 ) -> Result<(), PoolError> {
 	// Take a write lock on the txpool for the duration of this processing.
 	let mut tx_pool = tx_pool.write();
diff --git a/servers/src/epic/server.rs b/servers/src/epic/server.rs
index c31d1ae8..fd638145 100644
--- a/servers/src/epic/server.rs
+++ b/servers/src/epic/server.rs
@@ -54,6 +54,7 @@ use crate::util::{RwLock, StopState};
 use clokwerk::{/*ScheduleHandle,*/ Scheduler, TimeUnits};
 use epic_util::logger::LogEntry;
 use fs2::FileExt;
+use futures::channel::oneshot;
 use walkdir::WalkDir;
 
 fn is_test_network() -> bool {
@@ -63,6 +64,9 @@ fn is_test_network() -> bool {
 	}
 }
 
+/// Arcified thread-safe TransactionPool with type parameters used by server components
+pub type ServerTxPool = Arc>>;
+
 /// Epic server holding internal structures.
 pub struct Server {
 	/// server config
@@ -72,7 +76,7 @@ pub struct Server {
 	/// data store access
 	pub chain: Arc,
 	/// in-memory transaction pool
-	pub tx_pool: Arc>,
+	pub tx_pool: ServerTxPool,
 	/// Whether we're currently syncing
 	pub sync_state: Arc,
 	/// To be passed around to collect stats and info
@@ -94,6 +98,8 @@ impl Server {
 		config: ServerConfig,
 		logs_rx: Option>,
 		mut info_callback: F,
+		stop_state: Option>,
+		api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
 	) -> Result<(), Error>
 	where
 		F: FnMut(Server, Option>),
@@ -155,7 +161,7 @@ impl Server {
 		let enable_test_miner = config.run_test_miner;
 		let test_miner_wallet_url = config.test_miner_wallet_url.clone();
 
-		let serv = Server::new(config)?;
+		let serv = Server::new(config, stop_state, api_chan)?;
 
 		if let Some(c) = mining_config {
 			let enable_stratum_server = c.enable_stratum_server;
@@ -206,7 +212,11 @@ impl Server {
 	}
 
 	/// Instantiates a new server associated with the provided future reactor.
-	pub fn new(config: ServerConfig) -> Result {
+	pub fn new(
+		config: ServerConfig,
+		stop_state: Option>,
+		api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
+	) -> Result {
 		// Obtain our lock_file or fail immediately with an error.
 		let lock_file = Server::one_epic_at_a_time(&config)?;
 
@@ -354,6 +364,8 @@ impl Server {
 			api_secret.clone(),
 			foreign_api_secret.clone(),
 			tls_conf.clone(),
+			api_chan,
+			stop_state.clone(),
 		)?;
 
 		info!("Starting dandelion monitor: {}", &config.api_http_addr);
diff --git a/servers/src/lib.rs b/servers/src/lib.rs
index 7bd6bb3a..835920c7 100644
--- a/servers/src/lib.rs
+++ b/servers/src/lib.rs
@@ -43,4 +43,4 @@ mod mining;
 
 pub use crate::common::stats::{DiffBlock, PeerStats, ServerStats, StratumStats, WorkerStats};
 pub use crate::common::types::{ServerConfig, StratumServerConfig};
-pub use crate::epic::server::Server;
+pub use crate::epic::server::{Server, ServerTxPool};
diff --git a/servers/src/mining/mine_block.rs b/servers/src/mining/mine_block.rs
index af2ef3fb..9a8132c8 100644
--- a/servers/src/mining/mine_block.rs
+++ b/servers/src/mining/mine_block.rs
@@ -15,50 +15,39 @@
 //! Build a block to mine: gathers transactions from the pool, assembles
 //! them into a block and returns it.
 
+<<<<<<< HEAD
 use crate::util::RwLock;
 use chrono::{NaiveDateTime, TimeZone, Utc};
-use rand::{thread_rng, Rng};
-use serde_json::{json, Value};
-use std::sync::Arc;
-use std::thread;
-use std::time::Duration;
-
+=======
 use crate::api;
 use crate::chain;
 use crate::common::types::Error;
 use crate::core::consensus::is_foundation_height;
-//use crate::core::core::block::feijoada::{next_block_bottles, Deterministic, Feijoada};
 use crate::core::core::foundation::load_foundation_output;
 pub use crate::core::core::foundation::CbData;
-//use crate::core::core::hash::{Hash, Hashed};
-//use crate::core::core::{Output, TxKernel};
 use crate::core::global::get_emitted_policy;
 use crate::core::libtx::ProofBuilder;
 use crate::core::pow::randomx::rx_current_seed_height;
 use crate::core::pow::PoWType;
 use crate::core::{consensus, core, global};
 use crate::keychain::{ExtKeychain, Identifier, Keychain};
-use crate::pool;
+use chrono::prelude::{DateTime, NaiveDateTime, Utc};
+>>>>>>> fc1a273 (update node apis future and hyper)
+use rand::{thread_rng, Rng};
+use serde_json::{json, Value};
+use std::sync::Arc;
+use std::thread;
+use std::time::Duration;
 
 use crate::core::core::block_fees::BlockFees;
-
-/// Response to build a coinbase output.
-/*#[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct CbData {
-	/// Output
-	pub output: Output,
-	/// Kernel
-	pub kernel: TxKernel,
-	/// Key Id
-	pub key_id: Option,
-}*/
+use crate::ServerTxPool;
 
 // Ensure a block suitable for mining is built and returned
 // If a wallet listener URL is not provided the reward will be "burnt"
 // Warning: This call does not return until/unless a new block can be built
 pub fn get_block(
 	chain: &Arc,
-	tx_pool: &Arc>,
+	tx_pool: &ServerTxPool,
 	key_id: Option,
 	wallet_listener_url: Option,
 ) -> (core::Block, BlockFees, PoWType) {
@@ -107,7 +96,7 @@ pub fn get_block(
 /// transactions from the pool.
 fn build_block(
 	chain: &Arc,
-	tx_pool: &Arc>,
+	tx_pool: &ServerTxPool,
 	key_id: Option,
 	wallet_listener_url: Option,
 ) -> Result<(core::Block, BlockFees, PoWType), Error> {
@@ -272,7 +261,7 @@ fn get_coinbase(
 	let kernel = res.kernel;
 	let key_id = res.key_id;
 	let block_fees = BlockFees {
-		key_id: key_id,
+		key_id,
 		..block_fees
 	};
 
@@ -297,7 +286,8 @@ fn build_coinbase(dest: &str, block_fees: &BlockFees, method: &str) -> Result
>) {
+	pub fn run(&self, config: &StratumServerConfig, tx_pool: &ServerTxPool) {
 		debug!("Run main loop");
 		let mut deadline: i64 = 0;
 		let mut head = self.chain.head().unwrap();
@@ -1011,7 +1011,7 @@ pub struct StratumServer {
 	id: String,
 	config: StratumServerConfig,
 	chain: Arc,
-	tx_pool: Arc>,
+	pub tx_pool: ServerTxPool,
 	sync_state: Arc,
 	stratum_stats: Arc>,
 }
@@ -1021,7 +1021,7 @@ impl StratumServer {
 	pub fn new(
 		config: StratumServerConfig,
 		chain: Arc,
-		tx_pool: Arc>,
+		tx_pool: ServerTxPool,
 		stratum_stats: Arc>,
 	) -> StratumServer {
 		StratumServer {
diff --git a/servers/src/mining/test_miner.rs b/servers/src/mining/test_miner.rs
index 1f732ed0..4ab6feec 100644
--- a/servers/src/mining/test_miner.rs
+++ b/servers/src/mining/test_miner.rs
@@ -17,23 +17,22 @@
 //! header with its proof-of-work. Any valid mined blocks are submitted to the
 //! network.
-use crate::util::RwLock;
-use chrono::prelude::Utc;
-use std::sync::Arc;
-
 use crate::chain;
 use crate::common::types::StratumServerConfig;
 use crate::core::core::hash::{Hash, Hashed};
 use crate::core::core::{Block, BlockHeader};
 use crate::core::global;
 use crate::mining::mine_block;
-use crate::pool;
+use chrono::prelude::Utc;
+use std::sync::Arc;
+
 use crate::util::StopState;
+use crate::ServerTxPool;
 
 pub struct Miner {
 	config: StratumServerConfig,
 	chain: Arc,
-	tx_pool: Arc>,
+	tx_pool: ServerTxPool,
 	stop_state: Arc,
 
 	// Just to hold the port we're on, so this miner can be identified
@@ -47,7 +46,7 @@ impl Miner {
 	pub fn new(
 		config: StratumServerConfig,
 		chain: Arc,
-		tx_pool: Arc>,
+		tx_pool: ServerTxPool,
 		stop_state: Arc,
 	) -> Miner {
 		Miner {
diff --git a/src/bin/cmd/client.rs b/src/bin/cmd/client.rs
index d5068ca4..a54c0665 100644
--- a/src/bin/cmd/client.rs
+++ b/src/bin/cmd/client.rs
@@ -18,6 +18,7 @@ use std::net::SocketAddr;
 use clap::ArgMatches;
 
 use crate::api;
+
 use crate::config::GlobalConfig;
 use crate::p2p;
 use crate::servers::ServerConfig;
@@ -132,7 +133,7 @@ pub fn list_connected_peers(config: &ServerConfig, api_secret: Option) {
 	let peers_info = api::client::get::>(url.as_str(), api_secret);
 
-	match peers_info.map_err(|e| Error::API(e)) {
+	match peers_info {
 		Ok(connected_peers) => {
 			let mut index = 0;
 			for connected_peer in connected_peers {
@@ -159,7 +160,10 @@ fn get_status_from_node(
 	api_secret: Option,
 ) -> Result {
 	let url = format!("http://{}/v1/status", config.api_http_addr);
-	api::client::get::(url.as_str(), api_secret).map_err(|e| Error::API(e))
+	match api::client::get::(url.as_str(), api_secret) {
+		Ok(status) => Ok(status),
+		Err(e) => Err(Error::API(e)),
+	}
 }
 
 /// Error type wrapping underlying module errors.
diff --git a/src/bin/cmd/server.rs b/src/bin/cmd/server.rs
index 7ba53baa..1c9b31c9 100644
--- a/src/bin/cmd/server.rs
+++ b/src/bin/cmd/server.rs
@@ -12,27 +12,30 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-/// EPIC server commands processing
-use std::process::exit;
-use std::sync::atomic::{AtomicBool, Ordering};
-use std::sync::Arc;
-use std::thread;
-use std::time::Duration;
-
-use clap::ArgMatches;
-use ctrlc;
-
 use crate::config::GlobalConfig;
 use crate::core::global;
 use crate::p2p::{PeerAddr, Seeding};
 use crate::servers;
 use crate::tui::ui;
+use clap::ArgMatches;
+use ctrlc;
 use epic_util::logger::LogEntry;
+use futures::channel::oneshot;
+/// EPIC server commands processing
+use std::process::exit;
+use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::mpsc;
+use std::sync::Arc;
+use std::thread;
+use std::time::Duration;
 
 /// wrap below to allow UI to clean up on stop
-pub fn start_server(config: servers::ServerConfig, logs_rx: Option>) {
-	start_server_tui(config, logs_rx);
+pub fn start_server(
+	config: servers::ServerConfig,
+	logs_rx: Option>,
+	api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
+) {
+	start_server_tui(config, logs_rx, api_chan);
 	// Just kill process for now, otherwise the process
 	// hangs around until sigint because the API server
 	// currently has no shutdown facility
@@ -42,7 +45,11 @@ pub fn start_server(config: servers::ServerConfig, logs_rx: Option
>) {
+fn start_server_tui(
+	config: servers::ServerConfig,
+	logs_rx: Option>,
+	api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
+) {
 	// Run the UI controller.. here for now for simplicity to access
 	// everything it might need
 	if config.run_tui.unwrap_or(false) {
@@ -56,6 +63,8 @@ fn start_server_tui(config: servers::ServerConfig, logs_rx: Option
>,
 	mut global_config: GlobalConfig,
 	logs_rx: Option>,
+	api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
 ) -> i32 {
 	global::set_mining_mode(
 		global_config
@@ -134,7 +146,7 @@ pub fn server_command(
 	if let Some(a) = server_args {
 		match a.subcommand() {
 			("run", _) => {
-				start_server(server_config, logs_rx);
+				start_server(server_config, logs_rx, api_chan);
 			}
 			("", _) => {
 				println!("Subcommand required, use 'EPIC help server' for details");
@@ -148,7 +160,7 @@
 			}
 		}
 	} else {
-		start_server(server_config, logs_rx);
+		start_server(server_config, logs_rx, api_chan);
 	}
 	0
 }
diff --git a/src/bin/epic.rs b/src/bin/epic.rs
index 8a35fed9..e47ed853 100644
--- a/src/bin/epic.rs
+++ b/src/bin/epic.rs
@@ -32,6 +32,7 @@ use epic_p2p as p2p;
 use epic_servers as servers;
 use epic_util as util;
 use epic_util::logger::LogEntry;
+use futures::channel::oneshot;
 use servers::foundation::create_foundation;
 use std::env;
 use std::path::Path;
@@ -194,6 +195,9 @@ fn real_main() -> i32 {
 	let mut logging_config = config.members.as_mut().unwrap().logging.clone().unwrap();
 	logging_config.tui_running = config.members.as_mut().unwrap().server.run_tui;
 
+	let api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>) =
+		Box::leak(Box::new(oneshot::channel::<()>()));
+
 	let (logs_tx, logs_rx) = if logging_config.tui_running.unwrap() {
 		let (logs_tx, logs_rx) = mpsc::sync_channel::(200);
 		(Some(logs_tx), Some(logs_rx))
@@ -219,7 +223,7 @@ fn real_main() -> i32 {
 	match args.subcommand() {
 		// server commands and options
 		("server", Some(server_args)) => {
-			cmd::server_command(Some(server_args), node_config.unwrap(), logs_rx)
+			cmd::server_command(Some(server_args), node_config.unwrap(), logs_rx, api_chan)
 		}
 
 		// client commands and options
@@ -238,6 +242,6 @@ fn real_main() -> i32 {
 		// If nothing is specified, try to just use the config file instead
 		// this could possibly become the way to configure most things
 		// with most command line options being phased out
-		_ => cmd::server_command(None, node_config.unwrap(), logs_rx),
+		_ => cmd::server_command(None, node_config.unwrap(), logs_rx, api_chan),
 	}
 }
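For readers less familiar with the refactor that recurs throughout the hunks above (trait-object pool fields replaced by type parameters plus a single crate-level alias, as with ServerTxPool), here is a minimal, self-contained sketch of the same shape in plain std Rust. The names below (the BlockChain/PoolAdapter stand-ins, DummyChain, NoopAdapter, SharedTxPool) are illustrative only and are not the epic types; the real code uses epic's own traits, util::RwLock and the ServerTxPool alias defined in servers/src/epic/server.rs.

use std::collections::VecDeque;
use std::sync::{Arc, RwLock};

// Stand-in traits playing the role of pool::BlockChain and pool::PoolAdapter.
trait BlockChain {
    fn chain_head(&self) -> u64;
}
trait PoolAdapter {
    fn tx_accepted(&self, tx_id: u64);
}

// A pool generic over its chain view and adapter, mirroring the shape of
// TransactionPool<B, P> after the refactor.
struct TransactionPool<B, P>
where
    B: BlockChain,
    P: PoolAdapter,
{
    blockchain: Arc<B>,
    adapter: Arc<P>,
    entries: VecDeque<u64>,
}

impl<B, P> TransactionPool<B, P>
where
    B: BlockChain,
    P: PoolAdapter,
{
    fn new(blockchain: Arc<B>, adapter: Arc<P>) -> Self {
        TransactionPool { blockchain, adapter, entries: VecDeque::new() }
    }

    fn add_tx(&mut self, tx_id: u64) {
        // Only accept transactions once the (dummy) chain reports a head.
        if self.blockchain.chain_head() > 0 {
            self.entries.push_back(tx_id);
            self.adapter.tx_accepted(tx_id);
        }
    }
}

// Concrete dummy implementations so the alias below has something to point at.
struct DummyChain;
impl BlockChain for DummyChain {
    fn chain_head(&self) -> u64 { 1 }
}
struct NoopAdapter;
impl PoolAdapter for NoopAdapter {
    fn tx_accepted(&self, _tx_id: u64) {}
}

// Crate-level alias in the role of ServerTxPool: the concrete generic
// instantiation is named once and shared by every component that needs it.
type SharedTxPool = Arc<RwLock<TransactionPool<DummyChain, NoopAdapter>>>;

fn main() {
    let pool: SharedTxPool = Arc::new(RwLock::new(TransactionPool::new(
        Arc::new(DummyChain),
        Arc::new(NoopAdapter),
    )));
    pool.write().unwrap().add_tx(42);
    println!("pool size: {}", pool.read().unwrap().entries.len());
}

The benefit of the alias is visible in the adapters, dandelion monitor, stratum server and miner changes above: every `Arc<RwLock<TransactionPool<...>>>` parameter collapses to one shared name, so the concrete type parameters are spelled out in exactly one place.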