diff --git a/.github/doc-label-config.yml b/.github/doc-label-config.yml
new file mode 100644
index 000000000000..60f20533a1d2
--- /dev/null
+++ b/.github/doc-label-config.yml
@@ -0,0 +1,4 @@
+Doc not needed:
+  - '- \[x\] This PR does not require documentation updates.'
+Doc update required:
+  - '- \[ \] This PR does not require documentation updates.'
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 1c62e4ad4105..c0bc418013d2 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -15,5 +15,6 @@ Please explain IN DETAIL what the changes are in this PR and why they are needed
 
 - [ ] I have written the necessary rustdoc comments.
 - [ ] I have added the necessary unit tests and integration tests.
+- [ ] This PR does not require documentation updates.
 
 ## Refer to a related PR or issue link (optional)
diff --git a/.github/workflows/doc-label.yml b/.github/workflows/doc-label.yml
new file mode 100644
index 000000000000..930134674abd
--- /dev/null
+++ b/.github/workflows/doc-label.yml
@@ -0,0 +1,20 @@
+name: "PR Doc Labeler"
+on:
+  pull_request_target:
+    types: [opened, edited, synchronize, ready_for_review, auto_merge_enabled, labeled, unlabeled]
+
+permissions:
+  pull-requests: write
+  contents: read
+
+jobs:
+  triage:
+    if: ${{ github.repository == 'GreptimeTeam/greptimedb' }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: github/issue-labeler@v3.3
+        with:
+          configuration-path: .github/doc-label-config.yml
+          enable-versioned-regex: false
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          sync-labels: 1
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index d3b5181981cf..ead3521eb3c8 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -91,7 +91,7 @@ env:
   # The scheduled version is '${{ env.NEXT_RELEASE_VERSION }}-nightly-YYYYMMDD', like v0.2.0-nigthly-20230313;
   NIGHTLY_RELEASE_PREFIX: nightly
   # Note: The NEXT_RELEASE_VERSION should be modified manually by every formal release.
-  NEXT_RELEASE_VERSION: v0.5.0
+  NEXT_RELEASE_VERSION: v0.6.0
 
 jobs:
   allocate-runners:
diff --git a/.github/workflows/size-label.yml b/.github/workflows/size-label.yml
index 2b504f32f6a2..c0b0d1c052be 100644
--- a/.github/workflows/size-label.yml
+++ b/.github/workflows/size-label.yml
@@ -1,11 +1,14 @@
 name: size-labeler
 
-on: [pull_request]
+on: [pull_request_target]
 
 jobs:
   labeler:
     runs-on: ubuntu-latest
     name: Label the PR size
+    permissions:
+      issues: write
+      pull-requests: write
     steps:
       - uses: codelytv/pr-size-labeler@v1
         with:
@@ -22,5 +25,4 @@ jobs:
            This PR exceeds the recommended size of 1000 lines.
            Please make sure you are NOT addressing multiple issues with one PR.
            Note this PR might be rejected due to its size.
- github_api_url: 'api.github.com' files_to_ignore: 'Cargo.lock' diff --git a/Cargo.lock b/Cargo.lock index acba7791c33d..885ca3e2e0e3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -136,9 +136,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" +checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" dependencies = [ "anstyle", "anstyle-parse", @@ -156,37 +156,37 @@ checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317b9a89c1868f5ea6ff1d9539a69f45dffc21ce321ac1fd1160dfa48c8e2140" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anyhow" -version = "1.0.75" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" +checksum = "59d2a3357dde987206219e78ecfbbb6e8dad06cbb65292758d3270e6254f7355" [[package]] name = "anymap" @@ -196,7 +196,7 @@ checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72" [[package]] name = "api" -version = "0.4.4" +version = "0.5.0" dependencies = [ "common-base", "common-decimal", @@ -206,7 +206,7 @@ dependencies = [ "datatypes", "greptime-proto", "paste", - "prost 0.12.2", + "prost 0.12.3", "snafu", "tonic 0.10.2", "tonic-build 0.9.2", @@ -229,9 +229,9 @@ checksum = "b3f9eb837c6a783fbf002e3e5cc7925a3aa6893d6d42f9169517528983777590" [[package]] name = "aquamarine" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df752953c49ce90719c7bf1fc587bc8227aed04732ea0c0f85e5397d7fdbd1a1" +checksum = "d1da02abba9f9063d786eab1509833ebb2fac0f966862ca59439c76b9c566760" dependencies = [ "include_dir", "itertools 0.10.5", @@ -316,7 +316,7 @@ dependencies = [ "chrono", "chrono-tz 0.8.4", "half 2.3.1", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "num", ] @@ -395,7 +395,7 @@ dependencies = [ "bytes", "futures", "paste", - "prost 0.12.2", + "prost 0.12.3", "tokio", "tonic 0.10.2", ] @@ -461,7 +461,7 @@ dependencies = [ "arrow-data", "arrow-schema", "half 2.3.1", - "hashbrown 0.14.2", + "hashbrown 0.14.3", ] [[package]] @@ -597,7 +597,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -619,18 +619,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] name = "async-trait" -version = "0.1.74" +version = "0.1.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "fdf6721fb0140e4f897002dd086c06f6c27775df19cfe1fccb21181a48fd2c98" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -674,7 +674,7 @@ dependencies = [ [[package]] name = "auth" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -770,7 +770,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -847,11 +847,11 @@ dependencies = [ [[package]] name = "benchmarks" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arrow", "chrono", - "clap 4.4.8", + "clap 4.4.11", "client", "futures-util", "indicatif", @@ -899,7 +899,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -974,9 +974,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf617fabf5cdbdc92f774bfe5062d870f228b80056d41180797abf48bed4056e" +checksum = "26d4d6dafc1a3bb54687538972158f07b2c948bc57d5890df22c0739098b3028" dependencies = [ "borsh-derive", "cfg_aliases", @@ -984,15 +984,15 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f404657a7ea7b5249e36808dff544bc88a28f26e0ac40009f674b7a009d14be3" +checksum = "bf4918709cc4dd777ad2b6303ed03cb37f3ca0ccede8c1b0d28ac6db8f4710e0" dependencies = [ "once_cell", "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", "syn_derive", ] @@ -1179,7 +1179,7 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "catalog" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arc-swap", @@ -1401,9 +1401,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.8" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2275f18819641850fa26c89acc84d465c1bf91ce57bc2748b28c420473352f64" +checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" dependencies = [ "clap_builder", "clap_derive", @@ -1411,9 +1411,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.8" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07cdf1b148b25c1e1f7a42225e30a0d99a615cd4637eae7365548dd4529b95bc" +checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" dependencies = [ "anstream", "anstyle", @@ -1430,7 +1430,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -1450,7 +1450,7 @@ checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "client" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arrow-flight", @@ -1478,12 +1478,12 @@ dependencies = [ "moka", "parking_lot 0.12.1", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "rand", "session", "snafu", "substrait 0.17.1", - "substrait 0.4.4", + "substrait 0.5.0", "tokio", "tokio-stream", "tonic 0.10.2", @@ -1513,14 +1513,14 @@ dependencies = [ [[package]] name = "cmd" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anymap", "async-trait", "auth", 
"catalog", "chrono", - "clap 4.4.8", + "clap 4.4.11", "client", "common-base", "common-catalog", @@ -1542,6 +1542,7 @@ dependencies = [ "file-engine", "frontend", "futures", + "human-panic", "lazy_static", "meta-client", "meta-srv", @@ -1550,7 +1551,7 @@ dependencies = [ "partition", "plugins", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "query", "rand", "regex", @@ -1562,12 +1563,12 @@ dependencies = [ "session", "snafu", "store-api", - "substrait 0.4.4", + "substrait 0.5.0", "table", "temp-env", "tikv-jemallocator", "tokio", - "toml 0.7.8", + "toml 0.8.8", ] [[package]] @@ -1595,7 +1596,7 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" [[package]] name = "common-base" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anymap", "bitvec", @@ -1605,12 +1606,12 @@ dependencies = [ "paste", "serde", "snafu", - "toml 0.7.8", + "toml 0.8.8", ] [[package]] name = "common-catalog" -version = "0.4.4" +version = "0.5.0" dependencies = [ "chrono", "common-error", @@ -1621,7 +1622,7 @@ dependencies = [ [[package]] name = "common-config" -version = "0.4.4" +version = "0.5.0" dependencies = [ "common-base", "humantime-serde", @@ -1629,12 +1630,12 @@ dependencies = [ "serde", "serde_json", "serde_with", - "toml 0.7.8", + "toml 0.8.8", ] [[package]] name = "common-datasource" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arrow", "arrow-schema", @@ -1665,7 +1666,7 @@ dependencies = [ [[package]] name = "common-decimal" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arrow", "bigdecimal", @@ -1679,7 +1680,7 @@ dependencies = [ [[package]] name = "common-error" -version = "0.4.4" +version = "0.5.0" dependencies = [ "snafu", "strum 0.25.0", @@ -1687,7 +1688,7 @@ dependencies = [ [[package]] name = "common-function" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arc-swap", "build-data", @@ -1711,7 +1712,7 @@ dependencies = [ [[package]] name = "common-greptimedb-telemetry" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-trait", "common-error", @@ -1730,7 +1731,7 @@ dependencies = [ [[package]] name = "common-grpc" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arrow-flight", @@ -1750,7 +1751,7 @@ dependencies = [ "flatbuffers", "futures", "lazy_static", - "prost 0.12.2", + "prost 0.12.3", "rand", "snafu", "tokio", @@ -1760,7 +1761,7 @@ dependencies = [ [[package]] name = "common-grpc-expr" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -1779,7 +1780,7 @@ dependencies = [ [[package]] name = "common-macro" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arc-swap", "common-query", @@ -1789,12 +1790,12 @@ dependencies = [ "snafu", "static_assertions", "syn 1.0.109", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] name = "common-mem-prof" -version = "0.4.4" +version = "0.5.0" dependencies = [ "common-error", "common-macro", @@ -1807,7 +1808,7 @@ dependencies = [ [[package]] name = "common-meta" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-recursion", @@ -1834,7 +1835,7 @@ dependencies = [ "hyper", "lazy_static", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "rand", "regex", "rskafka", @@ -1846,13 +1847,13 @@ dependencies = [ "strum 0.25.0", "table", "tokio", - "toml 0.7.8", + "toml 0.8.8", "tonic 0.10.2", ] [[package]] name = "common-procedure" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-stream", "async-trait", @@ -1876,7 +1877,7 @@ dependencies = [ [[package]] name = "common-procedure-test" -version = "0.4.4" +version = "0.5.0" 
dependencies = [ "async-trait", "common-procedure", @@ -1884,7 +1885,7 @@ dependencies = [ [[package]] name = "common-query" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -1907,7 +1908,7 @@ dependencies = [ [[package]] name = "common-recordbatch" -version = "0.4.4" +version = "0.5.0" dependencies = [ "common-error", "common-macro", @@ -1924,7 +1925,7 @@ dependencies = [ [[package]] name = "common-runtime" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-trait", "common-error", @@ -1936,13 +1937,15 @@ dependencies = [ "prometheus", "snafu", "tokio", + "tokio-metrics", + "tokio-metrics-collector", "tokio-test", "tokio-util", ] [[package]] name = "common-telemetry" -version = "0.4.4" +version = "0.5.0" dependencies = [ "backtrace", "common-error", @@ -1968,7 +1971,7 @@ dependencies = [ [[package]] name = "common-test-util" -version = "0.4.4" +version = "0.5.0" dependencies = [ "once_cell", "rand", @@ -1977,7 +1980,7 @@ dependencies = [ [[package]] name = "common-time" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arrow", "chrono", @@ -1992,25 +1995,25 @@ dependencies = [ [[package]] name = "common-version" -version = "0.4.4" +version = "0.5.0" dependencies = [ "build-data", ] [[package]] name = "concurrent-queue" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" dependencies = [ "crossbeam-utils", ] [[package]] name = "config" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" +checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca" dependencies = [ "async-trait", "json5", @@ -2082,9 +2085,9 @@ checksum = "e4c78c047431fee22c1a7bb92e00ad095a02a983affe4d8a72e2a2c62c1b94f3" [[package]] name = "const-oid" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const-random" @@ -2114,9 +2117,9 @@ checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -2124,9 +2127,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpp_demangle" @@ -2219,9 +2222,9 @@ dependencies = [ [[package]] name = "crossbeam" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +checksum = "6eb9105919ca8e40d437fc9cbb8f1975d916f1bd28afe795a48aae32a2cc8920" dependencies = [ "cfg-if 1.0.0", "crossbeam-channel", @@ 
-2233,9 +2236,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "82a9b73a36529d9c47029b9fb3a6f0ea3cc916a261195352ba19e770fc1748b2" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -2243,9 +2246,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +checksum = "fca89a0e215bab21874660c67903c5f143333cab1da83d041c7ded6053774751" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", @@ -2254,22 +2257,20 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.15" +version = "0.9.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +checksum = "0e3681d554572a651dda4186cd47240627c3d0114d45a95f6ad27f2f22e7548d" dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", - "memoffset 0.9.0", - "scopeguard", ] [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "adc6598521bb5a83d491e8c1fe51db7296019d2ca3cb93cc6c2a20369a4d78a2" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -2277,9 +2278,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +checksum = "c3a430a770ebd84726f584a90ee7f020d28db52c6d02138900f22341f866d39c" dependencies = [ "cfg-if 1.0.0", ] @@ -2376,7 +2377,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -2398,7 +2399,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -2408,7 +2409,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if 1.0.0", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "lock_api", "once_cell", "parking_lot_core 0.9.9", @@ -2440,7 +2441,7 @@ dependencies = [ "futures", "glob", "half 2.3.1", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "indexmap 2.1.0", "itertools 0.11.0", "log", @@ -2490,7 +2491,7 @@ dependencies = [ "datafusion-common", "datafusion-expr", "futures", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "log", "object_store", "parking_lot 0.12.1", @@ -2524,7 +2525,7 @@ dependencies = [ "datafusion-common", "datafusion-expr", "datafusion-physical-expr", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "itertools 0.11.0", "log", "regex-syntax 0.8.2", @@ -2547,7 +2548,7 @@ dependencies = [ "datafusion-common", "datafusion-expr", "half 2.3.1", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "hex", "indexmap 2.1.0", "itertools 0.11.0", @@ -2581,7 +2582,7 @@ dependencies = [ "datafusion-physical-expr", "futures", "half 2.3.1", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "indexmap 2.1.0", "itertools 0.11.0", "log", @@ -2616,15 +2617,15 @@ dependencies = [ "datafusion", "itertools 0.11.0", "object_store", 
- "prost 0.12.2", - "prost-types 0.12.2", + "prost 0.12.3", + "prost-types 0.12.3", "substrait 0.17.1", "tokio", ] [[package]] name = "datanode" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arrow-flight", @@ -2673,7 +2674,7 @@ dependencies = [ "object-store", "pin-project", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "query", "reqwest", "secrecy", @@ -2684,11 +2685,11 @@ dependencies = [ "snafu", "sql", "store-api", - "substrait 0.4.4", + "substrait 0.5.0", "table", "tokio", "tokio-stream", - "toml 0.7.8", + "toml 0.8.8", "tonic 0.10.2", "tower", "tower-http", @@ -2698,7 +2699,7 @@ dependencies = [ [[package]] name = "datatypes" -version = "0.4.4" +version = "0.5.0" dependencies = [ "arrow", "arrow-array", @@ -2747,16 +2748,16 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ - "const-oid 0.9.5", + "const-oid 0.9.6", "pem-rfc7468 0.7.0", "zeroize", ] [[package]] name = "deranged" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" dependencies = [ "powerfmt", "serde", @@ -2781,7 +2782,7 @@ checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -2865,7 +2866,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", - "const-oid 0.9.5", + "const-oid 0.9.6", "crypto-common", "subtle", ] @@ -3021,7 +3022,7 @@ checksum = "eecf8589574ce9b895052fa12d69af7a233f99e6107f5cb8dd1044f2a17bfdcb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -3033,7 +3034,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -3044,21 +3045,21 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.3.31" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c" +checksum = "4adbf0983fe06bd3a5c19c8477a637c2389feb0994eca7a59e3b961054aa7c0a" dependencies = [ "serde", ] [[package]] name = "errno" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f258a7194e7f7c2a7837a8913aeab7fd8c383457034fa20ce4dd3dcb813e8eb8" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3087,7 +3088,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5231ad671c74ee5dc02753a0a9c855fe6e90de2a07acb2582f8a702470e04d1" dependencies = [ "http", - "prost 0.12.2", + "prost 0.12.3", "tokio", "tokio-stream", "tonic 0.10.2", @@ -3153,13 +3154,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ "cfg-if 1.0.0", - "rustix 0.38.25", + "rustix 0.38.28", "windows-sys 0.48.0", ] [[package]] name = "file-engine" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", 
@@ -3188,14 +3189,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] [[package]] @@ -3290,7 +3291,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frontend" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arc-swap", @@ -3340,7 +3341,7 @@ dependencies = [ "operator", "partition", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "query", "raft-engine", "regex", @@ -3354,10 +3355,10 @@ dependencies = [ "sqlparser 0.38.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=6a93567ae38d42be5c8d08b13c8ff4dde26502ef)", "store-api", "strfmt", - "substrait 0.4.4", + "substrait 0.5.0", "table", "tokio", - "toml 0.7.8", + "toml 0.8.8", "tonic 0.10.2", "tower", "uuid", @@ -3388,7 +3389,7 @@ checksum = "b0fa992f1656e1707946bbba340ad244f0814009ef8c0118eb7b658395f19a2e" dependencies = [ "frunk_proc_macro_helpers", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -3400,7 +3401,7 @@ dependencies = [ "frunk_core", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -3412,7 +3413,7 @@ dependencies = [ "frunk_core", "frunk_proc_macro_helpers", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -3439,9 +3440,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -3454,9 +3455,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -3464,15 +3465,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -3492,32 +3493,32 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.29" +version = "0.3.30" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] name = "futures-sink" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-timer" @@ -3527,9 +3528,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-channel", "futures-core", @@ -3599,9 +3600,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "git2" @@ -3627,7 +3628,7 @@ name = "greptime-proto" version = "0.1.0" source = "git+https://github.com/GreptimeTeam/greptime-proto.git?rev=a31ea166fc015ea7ff111ac94e26c3a5d64364d2#a31ea166fc015ea7ff111ac94e26c3a5d64364d2" dependencies = [ - "prost 0.12.2", + "prost 0.12.3", "serde", "serde_json", "strum 0.25.0", @@ -3692,9 +3693,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ "ahash 0.8.6", "allocator-api2", @@ -3706,7 +3707,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.3", ] [[package]] @@ -3784,9 +3785,9 @@ checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] @@ -3802,11 +3803,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.5" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3833,9 +3834,9 @@ dependencies = [ [[package]] name = 
"http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http", @@ -3860,6 +3861,22 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "human-panic" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a79a67745be0cb8dd2771f03b24c2f25df98d5471fe7a595d668cfa2e6f843d" +dependencies = [ + "anstream", + "anstyle", + "backtrace", + "os_info", + "serde", + "serde_derive", + "toml 0.8.8", + "uuid", +] + [[package]] name = "humantime" version = "2.1.0" @@ -3878,9 +3895,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", @@ -3893,7 +3910,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.5.5", "tokio", "tower-service", "tracing", @@ -3909,7 +3926,7 @@ dependencies = [ "futures-util", "http", "hyper", - "rustls 0.21.9", + "rustls 0.21.10", "tokio", "tokio-rustls 0.24.1", ] @@ -3992,7 +4009,7 @@ dependencies = [ [[package]] name = "index" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-trait", "asynchronous-codec", @@ -4007,7 +4024,7 @@ dependencies = [ "greptime-proto", "mockall", "pin-project", - "prost 0.12.2", + "prost 0.12.3", "rand", "regex", "regex-automata 0.1.10", @@ -4034,7 +4051,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "serde", ] @@ -4059,9 +4076,9 @@ checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" [[package]] name = "inferno" -version = "0.11.18" +version = "0.11.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abfb2e51b23c338595ae0b6bdaaa7a4a8b860b8d788a4331cb07b50fe5dea71b" +checksum = "321f0f839cd44a4686e9504b0a62b4d69a50b62072144c71c68f5873c167b8d9" dependencies = [ "ahash 0.8.6", "indexmap 2.1.0", @@ -4109,9 +4126,9 @@ checksum = "924df4f0e24e2e7f9cdd90babb0b96f93b20f3ecfa949ea9e6613756b8c8e1bf" [[package]] name = "inventory" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0508c56cfe9bfd5dfeb0c22ab9a6abfda2f27bdca422132e494266351ed8d83c" +checksum = "c8573b2b1fb643a372c73b23f4da5f888677feef3305146d68a539250a9bccc7" [[package]] name = "io-lifetimes" @@ -4159,7 +4176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi 0.3.3", - "rustix 0.38.25", + "rustix 0.38.28", "windows-sys 0.48.0", ] @@ -4181,11 +4198,20 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" +dependencies = [ + "either", +] + [[package]] 
name = "itoa" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jobserver" @@ -4198,9 +4224,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.65" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" dependencies = [ "wasm-bindgen", ] @@ -4218,13 +4244,14 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "8.3.0" +version = "9.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" +checksum = "5c7ea04a7c5c055c175f189b6dc6ba036fd62306b58c66c9f6389036c503a3f4" dependencies = [ "base64 0.21.5", - "pem 1.1.1", - "ring 0.16.20", + "js-sys", + "pem", + "ring 0.17.7", "serde", "serde_json", "simple_asn1", @@ -4360,9 +4387,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.150" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libgit2-sys" @@ -4440,9 +4467,9 @@ checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" [[package]] name = "linux-raw-sys" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" @@ -4462,7 +4489,7 @@ checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "log-store" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-stream", "async-trait", @@ -4551,7 +4578,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2994eeba8ed550fd9b47a0b38f0242bc3344e496483c6180b69139cc2fa5d1d7" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.3", ] [[package]] @@ -4606,9 +4633,9 @@ dependencies = [ [[package]] name = "mach2" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8" +checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" dependencies = [ "libc", ] @@ -4700,9 +4727,9 @@ dependencies = [ [[package]] name = "memmap2" -version = "0.8.0" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a5a03cefb0d953ec0be133036f14e109412fa594edc2f77227249db66cc3ed" +checksum = "45fd3a57831bf88bc63f8cebc0cf956116276e97fef3966103e96416209f7c92" dependencies = [ "libc", ] @@ -4736,7 +4763,7 @@ dependencies = [ [[package]] name = "meta-client" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -4766,7 +4793,7 @@ dependencies = [ [[package]] name = "meta-srv" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anymap", "api", @@ -4801,7 +4828,7 @@ dependencies = [ "once_cell", "parking_lot 0.12.1", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "rand", 
"regex", "serde", @@ -4814,7 +4841,7 @@ dependencies = [ "table", "tokio", "tokio-stream", - "toml 0.7.8", + "toml 0.8.8", "tonic 0.10.2", "tower", "tracing", @@ -4844,9 +4871,8 @@ dependencies = [ [[package]] name = "metric-engine" -version = "0.4.4" +version = "0.5.0" dependencies = [ - "ahash 0.8.6", "api", "aquamarine", "async-trait", @@ -4862,6 +4888,7 @@ dependencies = [ "datatypes", "lazy_static", "mito2", + "mur3", "object-store", "prometheus", "serde_json", @@ -4903,9 +4930,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", "log", @@ -4915,7 +4942,7 @@ dependencies = [ [[package]] name = "mito2" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anymap", "api", @@ -4957,7 +4984,7 @@ dependencies = [ "parquet", "paste", "prometheus", - "prost 0.12.2", + "prost 0.12.3", "regex", "serde", "serde_json", @@ -5039,6 +5066,12 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "mur3" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97af489e1e21b68de4c390ecca6703318bc1aa16e9733bcb62c089b73c6fbb1b" + [[package]] name = "mysql-common-derive" version = "0.30.2" @@ -5052,7 +5085,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", "termcolor", "thiserror", ] @@ -5075,11 +5108,11 @@ dependencies = [ "mio", "mysql_common", "once_cell", - "pem 3.0.2", + "pem", "percent-encoding", "pin-project", "rand", - "rustls 0.21.9", + "rustls 0.21.10", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -5303,7 +5336,7 @@ checksum = "cfb77679af88f8b125209d354a202862602672222e7f2313fdd6dc349bad4712" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -5398,16 +5431,16 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] [[package]] name = "object-store" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "async-trait", @@ -5451,9 +5484,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "oorandom" @@ -5584,7 +5617,7 @@ source = "git+https://github.com/waynexia/opentelemetry-rust.git?rev=33841b38dda dependencies = [ "opentelemetry 0.21.0 (git+https://github.com/waynexia/opentelemetry-rust.git?rev=33841b38dda79b15f2024952be5f32533325ca02)", "opentelemetry_sdk 0.20.0", - "prost 0.12.2", + "prost 0.12.3", "tonic 0.10.2", ] @@ -5622,7 +5655,7 @@ dependencies = [ "glob", "once_cell", "opentelemetry 0.21.0 (git+https://github.com/waynexia/opentelemetry-rust.git?rev=33841b38dda79b15f2024952be5f32533325ca02)", - "ordered-float 4.1.1", + 
"ordered-float 4.2.0", "percent-encoding", "rand", "thiserror", @@ -5642,7 +5675,7 @@ dependencies = [ "glob", "once_cell", "opentelemetry 0.21.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ordered-float 4.1.1", + "ordered-float 4.2.0", "percent-encoding", "rand", "thiserror", @@ -5652,7 +5685,7 @@ dependencies = [ [[package]] name = "operator" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -5696,7 +5729,7 @@ dependencies = [ "sql", "sqlparser 0.38.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=6a93567ae38d42be5c8d08b13c8ff4dde26502ef)", "store-api", - "substrait 0.4.4", + "substrait 0.5.0", "table", "tokio", "tonic 0.10.2", @@ -5752,9 +5785,9 @@ dependencies = [ [[package]] name = "ordered-float" -version = "4.1.1" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "536900a8093134cf9ccf00a27deb3532421099e958d9dd431135d0c7543ca1e8" +checksum = "a76df7075c7d4d01fdcb46c912dd17fba5b60c78ea480b475f2b6ab6f666584e" dependencies = [ "num-traits", ] @@ -5776,7 +5809,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4d6a8c22fc714f0c2373e6091bf6f5e9b37b1bc0b1184874b7e0a4e303d318f" dependencies = [ "dlv-list 0.5.2", - "hashbrown 0.14.2", + "hashbrown 0.14.3", +] + +[[package]] +name = "os_info" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e" +dependencies = [ + "log", + "serde", + "winapi", ] [[package]] @@ -5891,7 +5935,7 @@ dependencies = [ "chrono", "flate2", "futures", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "lz4", "num", "num-bigint", @@ -5916,7 +5960,7 @@ dependencies = [ [[package]] name = "partition" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -5961,18 +6005,9 @@ checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" [[package]] name = "pem" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" -dependencies = [ - "base64 0.13.1", -] - -[[package]] -name = "pem" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3163d2912b7c3b52d651a055f2c7eec9ba5cd22d26ef75b8dd3a59980b185923" +checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" dependencies = [ "base64 0.21.5", "serde", @@ -6033,7 +6068,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -6059,9 +6094,9 @@ dependencies = [ [[package]] name = "pgwire" -version = "0.17.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7f181d085a224ff2b2ea46bd2066b487b87e83dabbcdfe60bf3f027f5d0593" +checksum = "3b277432819ee6b76bf56de5e91eae578d6b332bd6f05f963ee81fc788bc886f" dependencies = [ "async-trait", "base64 0.21.5", @@ -6075,7 +6110,7 @@ dependencies = [ "md5", "postgres-types", "rand", - "ring 0.17.5", + "ring 0.17.7", "stringprep", "thiserror", "time", @@ -6150,7 +6185,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -6184,7 +6219,7 @@ checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" dependencies = [ "der 0.7.8", "pkcs8 0.10.2", - "spki 0.7.2", + "spki 0.7.3", ] [[package]] @@ 
-6205,14 +6240,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der 0.7.8", - "spki 0.7.2", + "spki 0.7.3", ] [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "69d3587f8a9e599cc7ec2c00e331f71c4e69a5f9a4b8a6efd5b07466b9736f9a" [[package]] name = "plotters" @@ -6244,7 +6279,7 @@ dependencies = [ [[package]] name = "plugins" -version = "0.4.4" +version = "0.5.0" dependencies = [ "auth", "common-base", @@ -6267,9 +6302,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" [[package]] name = "postgres-protocol" @@ -6323,9 +6358,9 @@ dependencies = [ "nix 0.26.4", "once_cell", "parking_lot 0.12.1", - "prost 0.12.2", - "prost-build 0.12.2", - "prost-derive 0.12.2", + "prost 0.12.3", + "prost-build 0.12.3", + "prost-derive 0.12.3", "protobuf", "sha2", "smallvec", @@ -6403,7 +6438,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -6451,9 +6486,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "75cb1540fadbd5b8fbccc4dddad2734eba435053f725621c070711a14bb5f4b8" dependencies = [ "unicode-ident", ] @@ -6502,7 +6537,7 @@ dependencies = [ [[package]] name = "promql" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-recursion", "async-trait", @@ -6520,7 +6555,7 @@ dependencies = [ "lazy_static", "prometheus", "promql-parser", - "prost 0.12.2", + "prost 0.12.3", "query", "session", "snafu", @@ -6553,12 +6588,12 @@ dependencies = [ [[package]] name = "prost" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5a410fc7882af66deb8d01d01737353cf3ad6204c408177ba494291a626312" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", - "prost-derive 0.12.2", + "prost-derive 0.12.3", ] [[package]] @@ -6585,9 +6620,9 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa3d084c8704911bfefb2771be2f9b6c5c0da7343a71e0021ee3c665cada738" +checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" dependencies = [ "bytes", "heck", @@ -6597,10 +6632,10 @@ dependencies = [ "once_cell", "petgraph", "prettyplease 0.2.15", - "prost 0.12.2", - "prost-types 0.12.2", + "prost 0.12.3", + "prost-types 0.12.3", "regex", - "syn 2.0.39", + "syn 2.0.43", "tempfile", "which", ] @@ -6620,15 +6655,15 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065717a5dfaca4a83d2fe57db3487b311365200000551d7a364e715dbf4346bc" +checksum = 
"efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", "itertools 0.11.0", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -6642,11 +6677,11 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8339f32236f590281e2f6368276441394fcd1b2133b549cc895d0ae80f2f9a52" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ - "prost 0.12.2", + "prost 0.12.3", ] [[package]] @@ -6711,7 +6746,7 @@ dependencies = [ [[package]] name = "puffin" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-trait", "bitflags 2.4.1", @@ -6822,7 +6857,7 @@ dependencies = [ [[package]] name = "query" -version = "0.4.4" +version = "0.5.0" dependencies = [ "ahash 0.8.6", "api", @@ -6880,7 +6915,7 @@ dependencies = [ "stats-cli", "store-api", "streaming-stats", - "substrait 0.4.4", + "substrait 0.5.0", "table", "tokio", "tokio-stream", @@ -6950,7 +6985,7 @@ dependencies = [ "crossbeam", "fail", "fs2", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "hex", "if_chain", "lazy_static", @@ -7059,15 +7094,6 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.4.1" @@ -7160,15 +7186,16 @@ dependencies = [ [[package]] name = "reqsign" -version = "0.14.3" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad14258ddd8ef6e564d57a94613e138cc9c21ef8a1fec547206d853213c7959" +checksum = "dce87f66ba6c6acef277a729f989a0eca946cb9ce6a15bcc036bda0f72d4b9fd" dependencies = [ "anyhow", "async-trait", "base64 0.21.5", "chrono", "form_urlencoded", + "getrandom", "hex", "hmac", "home", @@ -7180,7 +7207,7 @@ dependencies = [ "quick-xml 0.31.0", "rand", "reqwest", - "rsa 0.9.4", + "rsa 0.9.6", "rust-ini 0.20.0", "serde", "serde_json", @@ -7191,9 +7218,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" dependencies = [ "base64 0.21.5", "bytes", @@ -7213,7 +7240,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.9", + "rustls 0.21.10", "rustls-native-certs", "rustls-pemfile 1.0.4", "serde", @@ -7300,7 +7327,7 @@ checksum = "853977598f084a492323fe2f7896b4100a86284ee8473612de60021ea341310f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -7320,9 +7347,9 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.5" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb0205304757e5d899b9c2e448b867ffd03ae7f988002e47cd24954391394d0b" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", "getrandom", @@ -7334,12 +7361,13 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.42" +version = "0.7.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58" +checksum = 
"527a97cdfef66f65998b5f3b637c26f5a5ec09cc52a3f9932313ac645f4190f5" dependencies = [ "bitvec", "bytecheck", + "bytes", "hashbrown 0.12.3", "ptr_meta", "rend", @@ -7351,9 +7379,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.42" +version = "0.7.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" +checksum = "b5c462a1328c8e67e4d6dbad1eb0355dd43e8ab432c6e227a43657f16ade5033" dependencies = [ "proc-macro2", "quote", @@ -7393,11 +7421,11 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a3211b01eea83d80687da9eef70e39d65144a3894866a5153a2723e425a157f" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" dependencies = [ - "const-oid 0.9.5", + "const-oid 0.9.6", "digest", "num-bigint-dig", "num-integer", @@ -7406,7 +7434,7 @@ dependencies = [ "pkcs8 0.10.2", "rand_core", "signature", - "spki 0.7.2", + "spki 0.7.3", "subtle", "zeroize", ] @@ -7493,7 +7521,7 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.39", + "syn 2.0.43", "walkdir", ] @@ -7580,15 +7608,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.25" +version = "0.38.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" dependencies = [ "bitflags 2.4.1", "errno", "libc", - "linux-raw-sys 0.4.11", - "windows-sys 0.48.0", + "linux-raw-sys 0.4.12", + "windows-sys 0.52.0", ] [[package]] @@ -7605,12 +7633,12 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.9" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "629648aced5775d558af50b2b4c7b02983a04b312126d45eeead26e7caa498b9" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", - "ring 0.17.5", + "ring 0.17.7", "rustls-webpki 0.101.7", "sct", ] @@ -7622,7 +7650,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe6b63262c9fcac8659abfaa96cac103d28166d3ff3eaf8f412e19f3ae9e5a48" dependencies = [ "log", - "ring 0.17.5", + "ring 0.17.7", "rustls-pki-types", "rustls-webpki 0.102.0", "subtle", @@ -7662,9 +7690,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7673e0aa20ee4937c6aacfc12bb8341cfbf054cdd21df6bec5fd0629fe9339b" +checksum = "9e9d979b3ce68192e42760c7810125eb6cf2ea10efae545a156063e61f314e2a" [[package]] name = "rustls-webpki" @@ -7672,7 +7700,7 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.5", + "ring 0.17.7", "untrusted 0.9.0", ] @@ -7682,7 +7710,7 @@ version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de2635c8bc2b88d367767c5de8ea1d8db9af3f6219eba28442242d9ab81d1b89" dependencies = [ - "ring 0.17.5", + "ring 0.17.7", "rustls-pki-types", "untrusted 0.9.0", ] @@ -8024,9 +8052,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "safe-lock" @@ -8147,7 +8175,7 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "script" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arc-swap", @@ -8209,7 +8237,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.17.5", + "ring 0.17.7", "untrusted 0.9.0", ] @@ -8284,7 +8312,7 @@ checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -8327,14 +8355,14 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" dependencies = [ "serde", ] @@ -8348,7 +8376,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -8389,14 +8417,14 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] name = "serde_yaml" -version = "0.9.27" +version = "0.9.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" +checksum = "a15e0ef66bf939a7c890a0bf6d5a733c70202225f9888a89ed5c62298b019129" dependencies = [ "indexmap 2.1.0", "itoa", @@ -8407,7 +8435,7 @@ dependencies = [ [[package]] name = "servers" -version = "0.4.4" +version = "0.5.0" dependencies = [ "aide", "api", @@ -8466,7 +8494,7 @@ dependencies = [ "pprof", "prometheus", "promql-parser", - "prost 0.12.2", + "prost 0.12.3", "query", "rand", "regex", @@ -8503,7 +8531,7 @@ dependencies = [ [[package]] name = "session" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "arc-swap", @@ -8694,9 +8722,9 @@ dependencies = [ [[package]] name = "snap" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831" +checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" [[package]] name = "socket2" @@ -8754,9 +8782,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der 0.7.8", @@ -8764,7 +8792,7 @@ dependencies = [ [[package]] name = "sql" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "common-base", @@ -8790,11 +8818,11 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ - "itertools 0.11.0", + 
"itertools 0.12.0", "nom", "unicode_categories", ] @@ -8816,10 +8844,10 @@ dependencies = [ [[package]] name = "sqlness-runner" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-trait", - "clap 4.4.8", + "clap 4.4.11", "client", "common-base", "common-error", @@ -9022,7 +9050,7 @@ dependencies = [ [[package]] name = "store-api" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "aquamarine", @@ -9147,7 +9175,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -9162,7 +9190,7 @@ dependencies = [ [[package]] name = "substrait" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-recursion", "async-trait", @@ -9179,7 +9207,7 @@ dependencies = [ "datatypes", "futures", "promql", - "prost 0.12.2", + "prost 0.12.3", "session", "snafu", "substrait 0.17.1", @@ -9196,15 +9224,15 @@ dependencies = [ "git2", "heck", "prettyplease 0.2.15", - "prost 0.12.2", - "prost-build 0.12.2", - "prost-types 0.12.2", + "prost 0.12.3", + "prost-build 0.12.3", + "prost-types 0.12.3", "schemars", "semver", "serde", "serde_json", "serde_yaml", - "syn 2.0.39", + "syn 2.0.43", "typify", "walkdir", ] @@ -9217,21 +9245,21 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "symbolic-common" -version = "12.7.0" +version = "12.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39eac77836da383d35edbd9ff4585b4fc1109929ff641232f2e9a1aefdfc9e91" +checksum = "1cccfffbc6bb3bb2d3a26cd2077f4d055f6808d266f9d4d158797a4c60510dfe" dependencies = [ "debugid", - "memmap2 0.8.0", + "memmap2 0.9.3", "stable_deref_trait", "uuid", ] [[package]] name = "symbolic-demangle" -version = "12.7.0" +version = "12.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee1608a1d13061fb0e307a316de29f6c6e737b05459fe6bbf5dd8d7837c4fb7" +checksum = "76a99812da4020a67e76c4eb41f08c87364c14170495ff780f30dd519c221a68" dependencies = [ "cpp_demangle", "rustc-demangle", @@ -9251,9 +9279,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.39" +version = "2.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +checksum = "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53" dependencies = [ "proc-macro2", "quote", @@ -9278,7 +9306,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -9310,7 +9338,7 @@ dependencies = [ [[package]] name = "table" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anymap", "async-trait", @@ -9381,7 +9409,7 @@ dependencies = [ "cfg-if 1.0.0", "fastrand 2.0.1", "redox_syscall 0.4.1", - "rustix 0.38.25", + "rustix 0.38.28", "windows-sys 0.48.0", ] @@ -9422,7 +9450,7 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "tests-integration" -version = "0.4.4" +version = "0.5.0" dependencies = [ "api", "async-trait", @@ -9464,7 +9492,7 @@ dependencies = [ "operator", "partition", "paste", - "prost 0.12.2", + "prost 0.12.3", "query", "rand", "rskafka", @@ -9480,7 +9508,7 @@ dependencies = [ "sql", "sqlx", "store-api", - "substrait 0.4.4", + "substrait 0.5.0", "table", "tempfile", "time", @@ -9514,22 +9542,22 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.51" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "f11c217e1416d6f036b870f14e0413d480dbf28edbee1f877abaf0206af43bb7" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "01742297787513b79cf8e29d1056ede1313e2420b7b3b15d0a768b4921f549df" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -9596,9 +9624,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = "f657ba42c3f86e7680e53c8cd3af8abbe56b5491790b46e22e19c0d57463583e" dependencies = [ "deranged", "itoa", @@ -9616,18 +9644,18 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +checksum = "26197e33420244aeb70c3e8c78376ca46571bc4e701e4791c2cd9f57dcb3a43f" dependencies = [ "time-core", ] [[package]] name = "timsort" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb4fa83bb73adf1c7219f4fe4bf3c0ac5635e4e51e070fad5df745a41bedfb8" +checksum = "639ce8ef6d2ba56be0383a94dd13b92138d58de44c62618303bb798fa92bdc00" [[package]] name = "tiny-keccak" @@ -9665,9 +9693,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.34.0" +version = "1.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" dependencies = [ "backtrace", "bytes", @@ -9701,7 +9729,32 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", +] + +[[package]] +name = "tokio-metrics" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eace09241d62c98b7eeb1107d4c5c64ca3bd7da92e8c218c153ab3a78f9be112" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", + "tokio-stream", +] + +[[package]] +name = "tokio-metrics-collector" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767da47381602cc481653456823b3ebb600e83d5dd4e0293da9b5566c6c00f0" +dependencies = [ + "lazy_static", + "parking_lot 0.12.1", + "prometheus", + "tokio", + "tokio-metrics", ] [[package]] @@ -9761,7 +9814,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.9", + "rustls 0.21.10", "tokio", ] @@ -9826,14 +9879,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.8" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +checksum = 
"a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.19.15", + "toml_edit 0.21.0", ] [[package]] @@ -9852,8 +9905,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ "indexmap 2.1.0", - "serde", - "serde_spanned", "toml_datetime", "winnow", ] @@ -9869,6 +9920,19 @@ dependencies = [ "winnow", ] +[[package]] +name = "toml_edit" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +dependencies = [ + "indexmap 2.1.0", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + [[package]] name = "tonic" version = "0.9.2" @@ -9915,8 +9979,8 @@ dependencies = [ "hyper-timeout", "percent-encoding", "pin-project", - "prost 0.12.2", - "rustls 0.21.9", + "prost 0.12.3", + "rustls 0.21.10", "rustls-pemfile 1.0.4", "tokio", "tokio-rustls 0.24.1", @@ -9948,9 +10012,9 @@ checksum = "9d021fc044c18582b9a2408cd0dd05b1596e3ecdb5c4df822bb0183545683889" dependencies = [ "prettyplease 0.2.15", "proc-macro2", - "prost-build 0.12.2", + "prost-build 0.12.3", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -9959,8 +10023,8 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fa37c513df1339d197f4ba21d28c918b9ef1ac1768265f11ecb6b7f1cba1b76" dependencies = [ - "prost 0.12.2", - "prost-types 0.12.2", + "prost 0.12.3", + "prost-types 0.12.3", "tokio", "tokio-stream", "tonic 0.10.2", @@ -10061,7 +10125,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -10146,15 +10210,15 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee8098afad3fb0c54a9007aab6804558410503ad676d4633f9c2559a00ac0f" +checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "try_from" @@ -10184,9 +10248,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "typetag" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80960fd143d4c96275c0e60b08f14b81fbb468e79bc0ef8fbda69fb0afafae43" +checksum = "196976efd4a62737b3a2b662cda76efb448d099b1049613d7a5d72743c611ce0" dependencies = [ "erased-serde", "inventory", @@ -10197,13 +10261,13 @@ dependencies = [ [[package]] name = "typetag-impl" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc13d450dc4a695200da3074dacf43d449b968baee95e341920e47f61a3b40f" +checksum = "2eea6765137e2414c44c7b1e07c73965a118a72c46148e1e168b3fc9d3ccf3aa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -10229,7 +10293,7 @@ dependencies = [ "regress", "schemars", "serde_json", - "syn 2.0.39", + "syn 2.0.43", "thiserror", "unicode-ident", ] @@ -10246,7 +10310,7 @@ dependencies = [ 
"serde", "serde_json", "serde_tokenstream", - "syn 2.0.39", + "syn 2.0.43", "typify-impl", ] @@ -10407,9 +10471,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" [[package]] name = "unicode-casing" @@ -10472,9 +10536,9 @@ checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" [[package]] name = "unsafe-libyaml" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" +checksum = "ab4c90930b95a82d00dc9e9ac071b4991924390d46cbd0dfe566148667605e4b" [[package]] name = "untrusted" @@ -10532,7 +10596,7 @@ checksum = "f49e7f3f3db8040a100710a11932239fd30697115e2ba4107080d8252939845e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -10618,9 +10682,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -10628,24 +10692,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" +checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -10655,9 +10719,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10665,22 +10729,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" 
[[package]] name = "wasm-streams" @@ -10697,9 +10761,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.65" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" +checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" dependencies = [ "js-sys", "wasm-bindgen", @@ -10707,9 +10771,9 @@ dependencies = [ [[package]] name = "web-time" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57099a701fb3a8043f993e8228dc24229c7b942e2b009a1b962e54489ba1d3bf" +checksum = "aa30049b1c872b72c89866d458eae9f20380ab280ffd1b1e18df2d3e2d98cfe0" dependencies = [ "js-sys", "wasm-bindgen", @@ -10721,7 +10785,7 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring 0.17.5", + "ring 0.17.7", "untrusted 0.9.0", ] @@ -10749,7 +10813,7 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.25", + "rustix 0.38.28", ] [[package]] @@ -10849,6 +10913,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -10879,6 +10952,21 @@ dependencies = [ "windows_x86_64_msvc 0.48.5", ] +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -10891,6 +10979,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + [[package]] name = "windows_aarch64_msvc" version = "0.39.0" @@ -10909,6 +11003,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + [[package]] name = "windows_i686_gnu" version = "0.39.0" @@ -10927,6 +11027,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + [[package]] name = "windows_i686_msvc" version = "0.39.0" @@ -10945,6 +11051,12 @@ version = "0.48.5" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + [[package]] name = "windows_x86_64_gnu" version = "0.39.0" @@ -10963,6 +11075,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -10975,6 +11093,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + [[package]] name = "windows_x86_64_msvc" version = "0.39.0" @@ -10993,11 +11117,17 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + [[package]] name = "winnow" -version = "0.5.19" +version = "0.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" +checksum = "9b5c3db89721d50d0e2a673f5043fc4722f76dcc352d7b1ab8b8288bed4ed2c5" dependencies = [ "memchr", ] @@ -11041,10 +11171,10 @@ dependencies = [ "chrono", "der 0.7.8", "hex", - "pem 3.0.2", - "ring 0.17.5", + "pem", + "ring 0.17.7", "signature", - "spki 0.7.2", + "spki 0.7.3", "thiserror", "zeroize", ] @@ -11075,22 +11205,22 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.7.26" +version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.26" +version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] @@ -11110,7 +11240,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.43", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 9d6508b0d569..0e38d914eccb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -58,7 +58,7 @@ members = [ resolver = "2" [workspace.package] -version = "0.4.4" +version = "0.5.0" edition = "2021" license = "Apache-2.0" @@ -132,7 +132,7 @@ tempfile = "3" tokio = { version = "1.28", features = ["full"] } tokio-stream = { 
version = "0.1" } tokio-util = { version = "0.7", features = ["io-util", "compat"] } -toml = "0.7" +toml = "0.8.8" tonic = { version = "0.10", features = ["tls"] } uuid = { version = "1", features = ["serde", "v4", "fast-rng"] } @@ -192,8 +192,6 @@ table = { path = "src/table" } git = "https://github.com/GreptimeTeam/greptime-meter.git" rev = "abbd357c1e193cd270ea65ee7652334a150b628f" -[profile.dev] - [profile.release] debug = 1 diff --git a/config/datanode.example.toml b/config/datanode.example.toml index 342e10bfe19f..bd3f8fc2eec9 100644 --- a/config/datanode.example.toml +++ b/config/datanode.example.toml @@ -50,13 +50,13 @@ read_batch_size = 128 sync_write = false # Kafka wal options, see `standalone.example.toml`. -# broker_endpoints = ["127.0.0.1:9090"] +# broker_endpoints = ["127.0.0.1:9092"] # max_batch_size = "4MB" # linger = "200ms" -# max_wait_time = "100ms" +# produce_record_timeout = "100ms" # backoff_init = "500ms" # backoff_max = "10s" -# backoff_base = 2.0 +# backoff_base = 2 # backoff_deadline = "5mins" # Storage options, see `standalone.example.toml`. diff --git a/config/metasrv.example.toml b/config/metasrv.example.toml index 120f19255f3a..ff05a9c095e8 100644 --- a/config/metasrv.example.toml +++ b/config/metasrv.example.toml @@ -52,8 +52,8 @@ provider = "raft_engine" # There're none raft-engine wal config since meta srv only involves in remote wal currently. # Kafka wal config. -# The broker endpoints of the Kafka cluster. ["127.0.0.1:9090"] by default. -# broker_endpoints = ["127.0.0.1:9090"] +# The broker endpoints of the Kafka cluster. ["127.0.0.1:9092"] by default. +# broker_endpoints = ["127.0.0.1:9092"] # Number of topics to be created upon start. # num_topics = 64 # Topic selector type. @@ -65,7 +65,7 @@ provider = "raft_engine" # Number of partitions per topic. # num_partitions = 1 # Expected number of replicas of each partition. -# replication_factor = 3 +# replication_factor = 1 # Above which a topic creation operation will be cancelled. # create_topic_timeout = "30s" # The initial backoff for kafka clients. @@ -73,7 +73,7 @@ provider = "raft_engine" # The maximum backoff for kafka clients. # backoff_max = "10s" # Exponential backoff rate, i.e. next backoff = base * current backoff. -# backoff_base = 2.0 +# backoff_base = 2 # Stop reconnecting if the total wait time reaches the deadline. If this config is missing, the reconnecting won't terminate. # backoff_deadline = "5mins" diff --git a/config/standalone.example.toml b/config/standalone.example.toml index a9efb4d10365..7db8477ec78e 100644 --- a/config/standalone.example.toml +++ b/config/standalone.example.toml @@ -80,7 +80,7 @@ enable = true # Whether to enable Prometheus remote write and read in HTTP API, true by default. enable = true -[wal_meta] +[wal] # Available wal providers: # - "raft_engine" (default) # - "kafka" @@ -88,9 +88,10 @@ provider = "raft_engine" # There're none raft-engine wal config since meta srv only involves in remote wal currently. -# Kafka wal config. -# The broker endpoints of the Kafka cluster. ["127.0.0.1:9090"] by default. -# broker_endpoints = ["127.0.0.1:9090"] +# Kafka wal options. +# The broker endpoints of the Kafka cluster. ["127.0.0.1:9092"] by default. +# broker_endpoints = ["127.0.0.1:9092"] + # Number of topics to be created upon start. # num_topics = 64 # Topic selector type. @@ -102,40 +103,23 @@ provider = "raft_engine" # Number of partitions per topic. # num_partitions = 1 # Expected number of replicas of each partition. 
-# replication_factor = 3 -# Above which a topic creation operation will be cancelled. -# create_topic_timeout = "30s" -# The initial backoff for kafka clients. -# backoff_init = "500ms" -# The maximum backoff for kafka clients. -# backoff_max = "10s" -# Exponential backoff rate, i.e. next backoff = base * current backoff. -# backoff_base = 2.0 -# Stop reconnecting if the total wait time reaches the deadline. If this config is missing, the reconnecting won't terminate. -# backoff_deadline = "5mins" - -# WAL options for datanode. -[wal_datanode] -# Available wal providers: -# - "RaftEngine" (default) -# - "Kafka" -provider = "raft_engine" +# replication_factor = 1 -# Kafka wal options. -# The broker endpoints of the Kafka cluster. ["127.0.0.1:9090"] by default. -# broker_endpoints = ["127.0.0.1:9090"] # The maximum log size a kafka batch producer could buffer. # max_batch_size = "4MB" # The linger duration of a kafka batch producer. # linger = "200ms" # The maximum amount of time (in milliseconds) to wait for Kafka records to be returned. -# max_wait_time = "100ms" +# produce_record_timeout = "100ms" +# Above which a topic creation operation will be cancelled. +# create_topic_timeout = "30s" + # The initial backoff for kafka clients. # backoff_init = "500ms" # The maximum backoff for kafka clients. # backoff_max = "10s" # Exponential backoff rate, i.e. next backoff = base * current backoff. -# backoff_base = 2.0 +# backoff_base = 2 # Stop reconnecting if the total wait time reaches the deadline. If this config is missing, the reconnecting won't terminate. # backoff_deadline = "5mins" @@ -234,6 +218,8 @@ parallel_scan_channel_size = 32 # otlp_endpoint = "localhost:4317" # The percentage of tracing will be sampled and exported. Valid range `[0, 1]`, 1 means all traces are sampled, 0 means all traces are not sampled, the default value is 1. ratio > 1 are treated as 1. Fractions < 0 are treated as 0 # tracing_sample_ratio = 1.0 +# Whether to append logs to stdout. Defaults to true. +# append_stdout = true # Standalone export the metrics generated by itself # encoded to Prometheus remote-write format diff --git a/docs/rfcs/2023-12-22-enclose-column-id.md b/docs/rfcs/2023-12-22-enclose-column-id.md new file mode 100644 index 000000000000..ee7b4c61a2de --- /dev/null +++ b/docs/rfcs/2023-12-22-enclose-column-id.md @@ -0,0 +1,44 @@ +--- +Feature Name: Enclose Column Id +Tracking Issue: https://github.com/GreptimeTeam/greptimedb/issues/2982 +Date: 2023-12-22 +Author: "Ruihang Xia " +--- + +# Summary +This RFC proposes to enclose the usage of `ColumnId` into the region engine only. + +# Motivation +`ColumnId` is an identifier for columns. It's assigned by meta server, stored in `TableInfo` and `RegionMetadata` and used in region engine to distinguish columns. + +At present, Both Frontend, Datanode and Metasrv are aware of `ColumnId` but it's only used in region engine. Thus this RFC proposes to remove it from Frontend (mainly used in `TableInfo`) and Metasrv. + +# Details + +`ColumnId` is used widely on both read and write paths. Removing it from Frontend and Metasrv implies several things: + +- A column may have different column id in different regions. +- A column is identified by its name in all components. +- Column order in the region engine is not restricted, i.e., no need to be in the same order with table info. + +The first thing doesn't matter IMO. 
This concept doesn't exist anymore outside the region server, and each region is autonomous and independent -- the only guarantee it must hold is that those columns exist. But if we consider region repartition, where SST files would be re-assigned to different regions, things become a bit more complicated. A possible solution is to store the mapping between names and `ColumnId`s in the manifest, but that is out of the scope of this RFC. We can likely provide a workaround by introducing an indirection mapping layer over different versions of partitions. + +More importantly, we can still assume columns have the same column ids across regions. We have procedures to maintain consistency between regions, and the region engine should ensure alterations are idempotent. So it is possible that region repartition won't need to consider column ids or other region metadata in the future. + +Users write and query columns by their names, not by ColumnId or anything else. The second point also means changing the column reference in ScanRequest from an index to a name. This change can greatly reduce the misuse of column indexes, which has given us many surprises. + +As for the last one, column order only matters in the table info. This order is used in user-facing table structure operations, such as adding a column, describing a column, or serving as the default column order of an INSERT clause. None of them is connected with the order in storage. + +# Drawback +Firstly, this is a breaking change. Delivering this change requires a full upgrade of the cluster. Secondly, this change may introduce some performance regression. For example, we have to pass the full column name in the `ScanRequest` instead of the `ColumnId`. But this influence is very limited, since the column index is only used in the region engine. + +# Alternatives + +There are two alternatives from the perspective of "what can be used as the column identifier": + +- Index of the column in the table schema +- `ColumnId` of that column + +The first one is what we are using now. Choosing this way requires keeping the column order in the region engine the same as in the table info. This is not hard to achieve, but it's a bit annoying. And things become tricky when there are internal columns or different schemas, like those stored in file formats. This is the initial purpose of this RFC: decoupling the table schema from the region schema. + +The second one, on the other hand, requires the `ColumnId` to be identical in all regions and in `TableInfo`. It has the same drawback as the previous alternative: the `TableInfo` and `RegionMetadata` are tied tightly together. Another point is that `ColumnId` is assigned by the Metasrv, which doesn't need it but has to maintain it. This also limits the functionality of `ColumnId` by taking the ability to assign it away from the concrete region engine. 
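To make the name-based column reference concrete, here is a minimal, illustrative Rust sketch of how a region engine could resolve a projection given by names while keeping its `ColumnId`s private. All type and field names here (`ScanRequest`, `RegionColumns`, `by_name`) are hypothetical stand-ins for illustration only, not the actual GreptimeDB definitions.

```rust
use std::collections::HashMap;

// Hypothetical alias: per this RFC the real column id would only be
// meaningful inside the region engine.
type ColumnId = u32;

/// A simplified scan request whose projection refers to columns purely
/// by name (illustrative only, not the real `ScanRequest`).
struct ScanRequest {
    projection: Vec<String>,
}

/// A toy region-local mapping from column names to engine-internal ids.
struct RegionColumns {
    by_name: HashMap<String, ColumnId>,
}

impl RegionColumns {
    /// Resolves projected names into ids that never leave the region engine.
    /// Another region may map the same names to different ids.
    fn resolve(&self, request: &ScanRequest) -> Result<Vec<ColumnId>, String> {
        request
            .projection
            .iter()
            .map(|name| {
                self.by_name
                    .get(name)
                    .copied()
                    .ok_or_else(|| format!("unknown column: {name}"))
            })
            .collect()
    }
}

fn main() {
    let region = RegionColumns {
        by_name: HashMap::from([("ts".to_string(), 0), ("host".to_string(), 1)]),
    };
    let request = ScanRequest {
        projection: vec!["host".to_string(), "ts".to_string()],
    };
    // The caller only ever supplies names; the ids stay region-local.
    assert_eq!(region.resolve(&request), Ok(vec![1, 0]));
}
```

Because the mapping is region-local, two regions of the same table are free to assign different ids to the same column name, which is exactly the first implication listed in the Details section.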
diff --git a/src/cmd/Cargo.toml b/src/cmd/Cargo.toml index c28b2982877b..7cf890356f6e 100644 --- a/src/cmd/Cargo.toml +++ b/src/cmd/Cargo.toml @@ -40,6 +40,7 @@ etcd-client.workspace = true file-engine.workspace = true frontend.workspace = true futures.workspace = true +human-panic = "1.2.2" lazy_static.workspace = true meta-client.workspace = true meta-srv.workspace = true diff --git a/src/cmd/src/bin/greptime.rs b/src/cmd/src/bin/greptime.rs index d8f19df4b613..070a79868abb 100644 --- a/src/cmd/src/bin/greptime.rs +++ b/src/cmd/src/bin/greptime.rs @@ -95,6 +95,14 @@ static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; #[tokio::main] async fn main() -> Result<()> { + let metadata = human_panic::Metadata { + version: env!("CARGO_PKG_VERSION").into(), + name: "GreptimeDB".into(), + authors: Default::default(), + homepage: "https://github.com/GreptimeTeam/greptimedb/discussions".into(), + }; + human_panic::setup_panic!(metadata); + common_telemetry::set_panic_hook(); let cli = greptimedb_cli(); diff --git a/src/cmd/src/cli/bench/metadata.rs b/src/cmd/src/cli/bench/metadata.rs index 7b77fed49dbd..6eedc18eac18 100644 --- a/src/cmd/src/cli/bench/metadata.rs +++ b/src/cmd/src/cli/bench/metadata.rs @@ -14,6 +14,7 @@ use std::time::Instant; +use common_meta::key::table_route::TableRouteValue; use common_meta::key::TableMetadataManagerRef; use common_meta::table_name::TableName; @@ -53,7 +54,11 @@ impl TableMetadataBencher { let start = Instant::now(); self.table_metadata_manager - .create_table_metadata(table_info, region_routes, region_wal_options) + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + region_wal_options, + ) .await .unwrap(); diff --git a/src/cmd/src/cli/upgrade.rs b/src/cmd/src/cli/upgrade.rs index e5615f4d8219..6936b13fd7b4 100644 --- a/src/cmd/src/cli/upgrade.rs +++ b/src/cmd/src/cli/upgrade.rs @@ -27,7 +27,7 @@ use common_meta::key::table_info::{TableInfoKey, TableInfoValue}; use common_meta::key::table_name::{TableNameKey, TableNameValue}; use common_meta::key::table_region::{TableRegionKey, TableRegionValue}; use common_meta::key::table_route::{TableRouteKey, TableRouteValue as NextTableRouteValue}; -use common_meta::key::{RegionDistribution, TableMetaKey}; +use common_meta::key::{RegionDistribution, TableMetaKey, TableMetaValue}; use common_meta::kv_backend::etcd::EtcdStore; use common_meta::kv_backend::KvBackendRef; use common_meta::range_stream::PaginationStream; @@ -153,7 +153,7 @@ impl MigrateTableMetadata { ) .unwrap(); - let new_table_value = NextTableRouteValue::new(table_route.region_routes); + let new_table_value = NextTableRouteValue::physical(table_route.region_routes); let table_id = table_route.table.id as u32; let new_key = TableRouteKey::new(table_id); diff --git a/src/cmd/src/standalone.rs b/src/cmd/src/standalone.rs index 43d597d3f918..7816da3e4901 100644 --- a/src/cmd/src/standalone.rs +++ b/src/cmd/src/standalone.rs @@ -18,7 +18,12 @@ use std::{fs, path}; use async_trait::async_trait; use clap::Parser; use common_catalog::consts::MIN_USER_TABLE_ID; -use common_config::{metadata_store_dir, KvBackendConfig, WalConfig as DatanodeWalConfig}; +use common_config::wal::StandaloneWalConfig; +use common_config::{metadata_store_dir, KvBackendConfig}; use common_meta::cache_invalidator::DummyCacheInvalidator; use common_meta::datanode_manager::DatanodeManagerRef; use common_meta::ddl::{DdlTaskExecutorRef, TableMetadataAllocatorRef}; @@ -106,8 +111,12 @@ pub struct 
StandaloneOptions { pub opentsdb: OpentsdbOptions, pub influxdb: InfluxdbOptions, pub prom_store: PromStoreOptions, - pub wal_meta: MetaSrvWalConfig, - pub wal_datanode: DatanodeWalConfig, + pub wal: StandaloneWalConfig, pub storage: StorageConfig, pub metadata_store: KvBackendConfig, pub procedure: ProcedureConfig, @@ -130,8 +139,7 @@ impl Default for StandaloneOptions { opentsdb: OpentsdbOptions::default(), influxdb: InfluxdbOptions::default(), prom_store: PromStoreOptions::default(), - wal_meta: MetaSrvWalConfig::default(), - wal_datanode: DatanodeWalConfig::default(), + wal: StandaloneWalConfig::default(), storage: StorageConfig::default(), metadata_store: KvBackendConfig::default(), procedure: ProcedureConfig::default(), @@ -170,7 +178,11 @@ impl StandaloneOptions { DatanodeOptions { node_id: Some(0), enable_telemetry: self.enable_telemetry, - wal: self.wal_datanode, + wal: self.wal.into(), storage: self.storage, region_engine: self.region_engine, rpc_addr: self.grpc.addr, @@ -342,7 +354,11 @@ impl StartCommand { let procedure = opts.procedure.clone(); let frontend = opts.clone().frontend_options(); let logging = opts.logging.clone(); - let wal_meta = opts.wal_meta.clone(); + let wal_meta = opts.wal.clone().into(); let datanode = opts.datanode_options().clone(); Ok(Options::Standalone(Box::new(MixOptions { @@ -484,6 +500,7 @@ mod tests { use auth::{Identity, Password, UserProviderRef}; use common_base::readable_size::ReadableSize; + use common_config::WalConfig; use common_test_util::temp_dir::create_named_temp_file; use datanode::config::{FileConfig, GcsConfig}; use servers::Mode; @@ -534,6 +551,7 @@ mod tests { purge_interval = "10m" read_batch_size = 128 sync_write = false + [storage] data_home = "/tmp/greptimedb/" type = "File" diff --git a/src/common/config/src/wal.rs b/src/common/config/src/wal.rs index 60128d14b35e..f9c492758e63 100644 --- a/src/common/config/src/wal.rs +++ b/src/common/config/src/wal.rs @@ -18,7 +18,9 @@ pub mod raft_engine; use serde::{Deserialize, Serialize}; use serde_with::with_prefix; -pub use crate::wal::kafka::{KafkaConfig, KafkaOptions as KafkaWalOptions, Topic as KafkaWalTopic}; +pub use crate::wal::kafka::{ + KafkaConfig, KafkaOptions as KafkaWalOptions, StandaloneKafkaConfig, Topic as KafkaWalTopic, +}; pub use crate::wal::raft_engine::RaftEngineConfig; /// An encoded wal options will be wrapped into a (WAL_OPTIONS_KEY, encoded wal options) key-value pair @@ -27,30 +29,49 @@ pub const WAL_OPTIONS_KEY: &str = "wal_options"; /// Wal config for datanode. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] -#[serde(tag = "provider")] +#[serde(tag = "provider", rename_all = "snake_case")] pub enum WalConfig { - #[serde(rename = "raft_engine")] RaftEngine(RaftEngineConfig), - #[serde(rename = "kafka")] Kafka(KafkaConfig), } +impl From<StandaloneWalConfig> for WalConfig { + fn from(value: StandaloneWalConfig) -> Self { + match value { + StandaloneWalConfig::RaftEngine(config) => WalConfig::RaftEngine(config), + StandaloneWalConfig::Kafka(config) => WalConfig::Kafka(config.base), + } + } +} + impl Default for WalConfig { fn default() -> Self { WalConfig::RaftEngine(RaftEngineConfig::default()) } } +/// Wal config for datanode. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(tag = "provider", rename_all = "snake_case")] +pub enum StandaloneWalConfig { + RaftEngine(RaftEngineConfig), + Kafka(StandaloneKafkaConfig), +} + +impl Default for StandaloneWalConfig { + fn default() -> Self { + StandaloneWalConfig::RaftEngine(RaftEngineConfig::default()) + } +} + /// Wal options allocated to a region. /// A wal options is encoded by metasrv with `serde_json::to_string`, and then decoded /// by datanode with `serde_json::from_str`. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] -#[serde(tag = "wal.provider")] +#[serde(tag = "wal.provider", rename_all = "snake_case")] pub enum WalOptions { #[default] - #[serde(rename = "raft_engine")] RaftEngine, - #[serde(rename = "kafka")] #[serde(with = "prefix_wal_kafka")] Kafka(KafkaWalOptions), } @@ -64,15 +85,16 @@ mod tests { use common_base::readable_size::ReadableSize; use rskafka::client::partition::Compression as RsKafkaCompression; + use crate::wal::kafka::KafkaBackoffConfig; use crate::wal::{KafkaConfig, KafkaWalOptions, WalOptions}; #[test] fn test_serde_kafka_config() { let toml_str = r#" - broker_endpoints = ["127.0.0.1:9090"] + broker_endpoints = ["127.0.0.1:9092"] max_batch_size = "4MB" linger = "200ms" - max_wait_time = "100ms" + produce_record_timeout = "100ms" backoff_init = "500ms" backoff_max = "10s" backoff_base = 2 @@ -80,15 +102,17 @@ mod tests { "#; let decoded: KafkaConfig = toml::from_str(toml_str).unwrap(); let expected = KafkaConfig { - broker_endpoints: vec!["127.0.0.1:9090".to_string()], + broker_endpoints: vec!["127.0.0.1:9092".to_string()], compression: RsKafkaCompression::default(), max_batch_size: ReadableSize::mb(4), linger: Duration::from_millis(200), - max_wait_time: Duration::from_millis(100), - backoff_init: Duration::from_millis(500), - backoff_max: Duration::from_secs(10), - backoff_base: 2, - backoff_deadline: Some(Duration::from_secs(60 * 5)), + produce_record_timeout: Duration::from_millis(100), + backoff: KafkaBackoffConfig { + init: Duration::from_millis(500), + max: Duration::from_secs(10), + base: 2, + deadline: Some(Duration::from_secs(60 * 5)), + }, }; assert_eq!(decoded, expected); } diff --git a/src/common/config/src/wal/kafka.rs b/src/common/config/src/wal/kafka.rs index eb6795054141..e93aa6cb2271 100644 --- a/src/common/config/src/wal/kafka.rs +++ b/src/common/config/src/wal/kafka.rs @@ -17,12 +17,21 @@ use std::time::Duration; use common_base::readable_size::ReadableSize; use rskafka::client::partition::Compression as RsKafkaCompression; use serde::{Deserialize, Serialize}; +use serde_with::with_prefix; /// Topic name prefix. pub const TOPIC_NAME_PREFIX: &str = "greptimedb_wal_topic"; /// Kafka wal topic. pub type Topic = String; +/// The type of the topic selector, i.e. with which strategy to select a topic. +#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum TopicSelectorType { + #[default] + RoundRobin, +} + /// Configurations for kafka wal. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[serde(default)] @@ -40,34 +49,89 @@ pub struct KafkaConfig { pub linger: Duration, /// The maximum amount of time (in milliseconds) to wait for Kafka records to be returned. #[serde(with = "humantime_serde")] - pub max_wait_time: Duration, + pub produce_record_timeout: Duration, + /// The backoff config. 
+ #[serde(flatten, with = "kafka_backoff")] + pub backoff: KafkaBackoffConfig, +} + +impl Default for KafkaConfig { + fn default() -> Self { + Self { + broker_endpoints: vec!["127.0.0.1:9092".to_string()], + compression: RsKafkaCompression::NoCompression, + max_batch_size: ReadableSize::mb(4), + linger: Duration::from_millis(200), + produce_record_timeout: Duration::from_millis(100), + backoff: KafkaBackoffConfig::default(), + } + } +} + +with_prefix!(pub kafka_backoff "backoff_"); + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(default)] +pub struct KafkaBackoffConfig { /// The initial backoff for kafka clients. #[serde(with = "humantime_serde")] - pub backoff_init: Duration, + pub init: Duration, /// The maximum backoff for kafka clients. #[serde(with = "humantime_serde")] - pub backoff_max: Duration, + pub max: Duration, /// Exponential backoff rate, i.e. next backoff = base * current backoff. // Sets to u32 type since some structs containing the KafkaConfig need to derive the Eq trait. - pub backoff_base: u32, + pub base: u32, /// Stop reconnecting if the total wait time reaches the deadline. /// If it's None, the reconnecting won't terminate. #[serde(with = "humantime_serde")] - pub backoff_deadline: Option, + pub deadline: Option, } -impl Default for KafkaConfig { +impl Default for KafkaBackoffConfig { fn default() -> Self { Self { - broker_endpoints: vec!["127.0.0.1:9090".to_string()], - compression: RsKafkaCompression::NoCompression, - max_batch_size: ReadableSize::mb(4), - linger: Duration::from_millis(200), - max_wait_time: Duration::from_millis(100), - backoff_init: Duration::from_millis(500), - backoff_max: Duration::from_secs(10), - backoff_base: 2, - backoff_deadline: Some(Duration::from_secs(60 * 5)), // 5 mins + init: Duration::from_millis(500), + max: Duration::from_secs(10), + base: 2, + deadline: Some(Duration::from_secs(60 * 5)), // 5 mins + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(default)] +pub struct StandaloneKafkaConfig { + #[serde(flatten)] + pub base: KafkaConfig, + /// Number of topics to be created upon start. + pub num_topics: usize, + /// The type of the topic selector with which to select a topic for a region. + pub selector_type: TopicSelectorType, + /// Topic name prefix. + pub topic_name_prefix: String, + /// Number of partitions per topic. + pub num_partitions: i32, + /// The replication factor of each topic. + pub replication_factor: i16, + /// Above which a topic creation operation will be cancelled. 
+ #[serde(with = "humantime_serde")] + pub create_topic_timeout: Duration, +} + +impl Default for StandaloneKafkaConfig { + fn default() -> Self { + let base = KafkaConfig::default(); + let replication_factor = base.broker_endpoints.len() as i16; + + Self { + base, + num_topics: 64, + selector_type: TopicSelectorType::RoundRobin, + topic_name_prefix: "greptimedb_wal_topic".to_string(), + num_partitions: 1, + replication_factor, + create_topic_timeout: Duration::from_secs(30), } } } diff --git a/src/common/meta/src/ddl.rs b/src/common/meta/src/ddl.rs index 793df3f9c4d6..bb5220724ab6 100644 --- a/src/common/meta/src/ddl.rs +++ b/src/common/meta/src/ddl.rs @@ -21,10 +21,10 @@ use store_api::storage::{RegionNumber, TableId}; use crate::cache_invalidator::CacheInvalidatorRef; use crate::datanode_manager::DatanodeManagerRef; use crate::error::Result; +use crate::key::table_route::TableRouteValue; use crate::key::TableMetadataManagerRef; use crate::region_keeper::MemoryRegionKeeperRef; use crate::rpc::ddl::{CreateTableTask, SubmitDdlTaskRequest, SubmitDdlTaskResponse}; -use crate::rpc::router::RegionRoute; pub mod alter_table; pub mod create_table; @@ -58,7 +58,7 @@ pub struct TableMetadata { /// Table id. pub table_id: TableId, /// Route information for each region of the table. - pub region_routes: Vec, + pub table_route: TableRouteValue, /// The encoded wal options for regions of the table. // If a region does not have an associated wal options, no key for the region would be found in the map. pub region_wal_options: HashMap, diff --git a/src/common/meta/src/ddl/alter_table.rs b/src/common/meta/src/ddl/alter_table.rs index a48e46913173..092d4dd24263 100644 --- a/src/common/meta/src/ddl/alter_table.rs +++ b/src/common/meta/src/ddl/alter_table.rs @@ -182,7 +182,6 @@ impl AlterTableProcedure { pub async fn submit_alter_region_requests(&mut self) -> Result { let table_id = self.data.table_id(); - let table_ref = self.data.table_ref(); let table_route = self .context @@ -190,9 +189,7 @@ impl AlterTableProcedure { .table_route_manager() .get(table_id) .await? - .with_context(|| TableRouteNotFoundSnafu { - table_name: table_ref.to_string(), - })? + .context(TableRouteNotFoundSnafu { table_id })? 
.into_inner(); let region_routes = table_route.region_routes(); diff --git a/src/common/meta/src/ddl/create_table.rs b/src/common/meta/src/ddl/create_table.rs index 35050643d3c2..c73844fc8337 100644 --- a/src/common/meta/src/ddl/create_table.rs +++ b/src/common/meta/src/ddl/create_table.rs @@ -18,9 +18,8 @@ use api::v1::region::region_request::Body as PbRegionRequest; use api::v1::region::{ CreateRequest as PbCreateRegionRequest, RegionColumnDef, RegionRequest, RegionRequestHeader, }; -use api::v1::{ColumnDef, CreateTableExpr, SemanticType}; +use api::v1::{ColumnDef, SemanticType}; use async_trait::async_trait; -use common_catalog::consts::METRIC_ENGINE; use common_config::WAL_OPTIONS_KEY; use common_error::ext::BoxedError; use common_procedure::error::{ @@ -40,8 +39,9 @@ use table::metadata::{RawTableInfo, TableId}; use crate::ddl::utils::{handle_operate_region_error, handle_retry_error, region_storage_path}; use crate::ddl::DdlContext; -use crate::error::{self, Result, TableInfoNotFoundSnafu}; +use crate::error::{self, Result, TableRouteNotFoundSnafu}; use crate::key::table_name::TableNameKey; +use crate::key::table_route::TableRouteValue; use crate::metrics; use crate::region_keeper::OperatingRegionGuard; use crate::rpc::ddl::CreateTableTask; @@ -60,13 +60,13 @@ impl CreateTableProcedure { pub fn new( cluster_id: u64, task: CreateTableTask, - region_routes: Vec, + table_route: TableRouteValue, region_wal_options: HashMap, context: DdlContext, ) -> Self { Self { context, - creator: TableCreator::new(cluster_id, task, region_routes, region_wal_options), + creator: TableCreator::new(cluster_id, task, table_route, region_wal_options), } } @@ -78,10 +78,12 @@ impl CreateTableProcedure { opening_regions: vec![], }; - creator - .register_opening_regions(&context) - .map_err(BoxedError::new) - .context(ExternalSnafu)?; + if let TableRouteValue::Physical(x) = &creator.data.table_route { + creator.opening_regions = creator + .register_opening_regions(&context, &x.region_routes) + .map_err(BoxedError::new) + .context(ExternalSnafu)?; + } Ok(CreateTableProcedure { context, creator }) } @@ -94,10 +96,6 @@ impl CreateTableProcedure { self.table_info().ident.table_id } - pub fn region_routes(&self) -> &Vec { - &self.creator.data.region_routes - } - pub fn region_wal_options(&self) -> &HashMap { &self.creator.data.region_wal_options } @@ -132,7 +130,10 @@ impl CreateTableProcedure { Ok(Status::executing(true)) } - pub fn new_region_request_builder(&self) -> Result { + pub fn new_region_request_builder( + &self, + physical_table_id: Option, + ) -> Result { let create_table_expr = &self.creator.data.task.create_table; let column_defs = create_table_expr @@ -191,16 +192,54 @@ impl CreateTableProcedure { options: create_table_expr.table_options.clone(), }; - let builder = CreateRequestBuilder::new_template(self.context.clone(), template); - Ok(builder) + Ok(CreateRequestBuilder { + template, + physical_table_id, + }) } pub async fn on_datanode_create_regions(&mut self) -> Result { + match &self.creator.data.table_route { + TableRouteValue::Physical(x) => { + let region_routes = x.region_routes.clone(); + let request_builder = self.new_region_request_builder(None)?; + self.create_regions(®ion_routes, request_builder).await + } + TableRouteValue::Logical(x) => { + let physical_table_id = x.physical_table_id(); + + let physical_table_route = self + .context + .table_metadata_manager + .table_route_manager() + .get(physical_table_id) + .await? 
+ .context(TableRouteNotFoundSnafu { + table_id: physical_table_id, + })?; + let region_routes = physical_table_route.region_routes(); + + let request_builder = self.new_region_request_builder(Some(physical_table_id))?; + + self.create_regions(region_routes, request_builder).await + } + } + } + + async fn create_regions( + &mut self, + region_routes: &[RegionRoute], + request_builder: CreateRequestBuilder, + ) -> Result { // Registers opening regions - self.creator.register_opening_regions(&self.context)?; + let guards = self + .creator + .register_opening_regions(&self.context, region_routes)?; + if !guards.is_empty() { + self.creator.opening_regions = guards; + } let create_table_data = &self.creator.data; - let region_routes = &create_table_data.region_routes; let region_wal_options = &create_table_data.region_wal_options; let create_table_expr = &create_table_data.task.create_table; @@ -208,8 +247,6 @@ impl CreateTableProcedure { let schema = &create_table_expr.schema_name; let storage_path = region_storage_path(catalog, schema); - let mut request_builder = self.new_region_request_builder()?; - let leaders = find_leaders(region_routes); let mut create_region_tasks = Vec::with_capacity(leaders.len()); @@ -221,12 +258,7 @@ impl CreateTableProcedure { for region_number in regions { let region_id = RegionId::new(self.table_id(), region_number); let create_region_request = request_builder - .build_one( - &self.creator.data.task.create_table, - region_id, - storage_path.clone(), - region_wal_options, - ) + .build_one(region_id, storage_path.clone(), region_wal_options) .await?; requests.push(PbRegionRequest::Create(create_region_request)); @@ -270,10 +302,13 @@ impl CreateTableProcedure { let manager = &self.context.table_metadata_manager; let raw_table_info = self.table_info().clone(); - let region_routes = self.region_routes().clone(); let region_wal_options = self.region_wal_options().clone(); manager - .create_table_metadata(raw_table_info, region_routes, region_wal_options) + .create_table_metadata( + raw_table_info, + self.creator.data.table_route.clone(), + region_wal_options, + ) .await?; info!("Created table metadata for table {table_id}"); @@ -329,7 +364,7 @@ impl TableCreator { pub fn new( cluster_id: u64, task: CreateTableTask, - region_routes: Vec, + table_route: TableRouteValue, region_wal_options: HashMap, ) -> Self { Self { @@ -337,21 +372,23 @@ impl TableCreator { state: CreateTableState::Prepare, cluster_id, task, - region_routes, + table_route, region_wal_options, }, opening_regions: vec![], } } - /// Register opening regions if doesn't exist. - pub fn register_opening_regions(&mut self, context: &DdlContext) -> Result<()> { - let region_routes = &self.data.region_routes; - + /// Registers and returns the guards of the opening region if they don't exist. 
+ fn register_opening_regions( + &self, + context: &DdlContext, + region_routes: &[RegionRoute], + ) -> Result> { let opening_regions = operating_leader_regions(region_routes); if self.opening_regions.len() == opening_regions.len() { - return Ok(()); + return Ok(vec![]); } let mut opening_region_guards = Vec::with_capacity(opening_regions.len()); @@ -366,9 +403,7 @@ impl TableCreator { })?; opening_region_guards.push(guard); } - - self.opening_regions = opening_region_guards; - Ok(()) + Ok(opening_region_guards) } } @@ -386,7 +421,7 @@ pub enum CreateTableState { pub struct CreateTableData { pub state: CreateTableState, pub task: CreateTableTask, - pub region_routes: Vec, + table_route: TableRouteValue, pub region_wal_options: HashMap, pub cluster_id: u64, } @@ -399,28 +434,18 @@ impl CreateTableData { /// Builder for [PbCreateRegionRequest]. pub struct CreateRequestBuilder { - context: DdlContext, template: PbCreateRegionRequest, /// Optional. Only for metric engine. physical_table_id: Option, } impl CreateRequestBuilder { - fn new_template(context: DdlContext, template: PbCreateRegionRequest) -> Self { - Self { - context, - template, - physical_table_id: None, - } - } - pub fn template(&self) -> &PbCreateRegionRequest { &self.template } async fn build_one( - &mut self, - create_expr: &CreateTableExpr, + &self, region_id: RegionId, storage_path: String, region_wal_options: &HashMap, @@ -438,49 +463,18 @@ impl CreateRequestBuilder { .insert(WAL_OPTIONS_KEY.to_string(), wal_options.clone()) }); - if self.template.engine == METRIC_ENGINE { - self.metric_engine_hook(create_expr, region_id, &mut request) - .await?; - } - - Ok(request) - } + if let Some(physical_table_id) = self.physical_table_id { + // Logical table has the same region numbers with physical table, and they have a one-to-one mapping. + // For example, region 0 of logical table must resides with region 0 of physical table. So here we can + // simply concat the physical table id and the logical region number to get the physical region id. + let physical_region_id = RegionId::new(physical_table_id, region_id.region_number()); - async fn metric_engine_hook( - &mut self, - create_expr: &CreateTableExpr, - region_id: RegionId, - request: &mut PbCreateRegionRequest, - ) -> Result<()> { - if let Some(physical_table_name) = request.options.get(LOGICAL_TABLE_METADATA_KEY) { - let table_id = if let Some(table_id) = self.physical_table_id { - table_id - } else { - let table_name_manager = self.context.table_metadata_manager.table_name_manager(); - let table_name_key = TableNameKey::new( - &create_expr.catalog_name, - &create_expr.schema_name, - physical_table_name, - ); - let table_id = table_name_manager - .get(table_name_key) - .await? - .context(TableInfoNotFoundSnafu { - table_name: physical_table_name, - })? - .table_id(); - self.physical_table_id = Some(table_id); - table_id - }; - // Concat physical table's table id and corresponding region number to get - // the physical region id. 
- let physical_region_id = RegionId::new(table_id, region_id.region_number()); request.options.insert( LOGICAL_TABLE_METADATA_KEY.to_string(), physical_region_id.as_u64().to_string(), ); } - Ok(()) + Ok(request) } } diff --git a/src/common/meta/src/ddl_manager.rs b/src/common/meta/src/ddl_manager.rs index 471de7ac852f..6b1e4bf94f38 100644 --- a/src/common/meta/src/ddl_manager.rs +++ b/src/common/meta/src/ddl_manager.rs @@ -177,7 +177,7 @@ impl DdlManager { &self, cluster_id: u64, create_table_task: CreateTableTask, - region_routes: Vec, + table_route: TableRouteValue, region_wal_options: HashMap, ) -> Result { let context = self.create_context(); @@ -185,7 +185,7 @@ impl DdlManager { let procedure = CreateTableProcedure::new( cluster_id, create_table_task, - region_routes, + table_route, region_wal_options, context, ); @@ -275,9 +275,8 @@ async fn handle_truncate_table_task( table_name: table_ref.to_string(), })?; - let table_route_value = table_route_value.with_context(|| error::TableRouteNotFoundSnafu { - table_name: table_ref.to_string(), - })?; + let table_route_value = + table_route_value.context(error::TableRouteNotFoundSnafu { table_id })?; let table_route = table_route_value.into_inner().region_routes().clone(); @@ -356,9 +355,8 @@ async fn handle_drop_table_task( table_name: table_ref.to_string(), })?; - let table_route_value = table_route_value.with_context(|| error::TableRouteNotFoundSnafu { - table_name: table_ref.to_string(), - })?; + let table_route_value = + table_route_value.context(error::TableRouteNotFoundSnafu { table_id })?; let id = ddl_manager .submit_drop_table_task( @@ -392,7 +390,7 @@ async fn handle_create_table_task( let TableMetadata { table_id, - region_routes, + table_route, region_wal_options, } = table_meta; @@ -402,7 +400,7 @@ async fn handle_create_table_task( .submit_create_table_task( cluster_id, create_table_task, - region_routes, + table_route, region_wal_options, ) .await?; diff --git a/src/common/meta/src/error.rs b/src/common/meta/src/error.rs index 519d8ec7a1af..c120c8ba939d 100644 --- a/src/common/meta/src/error.rs +++ b/src/common/meta/src/error.rs @@ -135,9 +135,9 @@ pub enum Error { source: table::error::Error, }, - #[snafu(display("Table route not found: {}", table_name))] + #[snafu(display("Failed to find table route for table id {}", table_id))] TableRouteNotFound { - table_name: String, + table_id: TableId, location: Location, }, diff --git a/src/common/meta/src/key.rs b/src/common/meta/src/key.rs index d0e24c309b2e..bb2b87a973f5 100644 --- a/src/common/meta/src/key.rs +++ b/src/common/meta/src/key.rs @@ -147,6 +147,14 @@ pub trait TableMetaKey { fn as_raw_key(&self) -> Vec; } +pub trait TableMetaValue { + fn try_from_raw_value(raw_value: &[u8]) -> Result + where + Self: Sized; + + fn try_as_raw_value(&self) -> Result>; +} + pub type TableMetadataManagerRef = Arc; pub struct TableMetadataManager { @@ -221,7 +229,9 @@ impl Serialize for DeserializedValueWithBytes Deserialize<'de> for DeserializedValueWithBytes { +impl<'de, T: DeserializeOwned + Serialize + TableMetaValue> Deserialize<'de> + for DeserializedValueWithBytes +{ /// - Deserialize behaviors: /// /// The `inner` field will be deserialized from the `bytes` field. @@ -248,11 +258,11 @@ impl Clone for DeserializedValueWithByt } } -impl DeserializedValueWithBytes { +impl DeserializedValueWithBytes { /// Returns a struct containing a deserialized value and an original `bytes`. /// It accepts original bytes of inner. 
pub fn from_inner_bytes(bytes: Bytes) -> Result { - let inner = serde_json::from_slice(&bytes).context(error::SerdeJsonSnafu)?; + let inner = T::try_from_raw_value(&bytes)?; Ok(Self { bytes, inner }) } @@ -373,13 +383,10 @@ impl TableMetadataManager { pub async fn create_table_metadata( &self, mut table_info: RawTableInfo, - region_routes: Vec, + table_route_value: TableRouteValue, region_wal_options: HashMap, ) -> Result<()> { - let region_numbers = region_routes - .iter() - .map(|region| region.region.id.region_number()) - .collect::>(); + let region_numbers = table_route_value.region_numbers(); table_info.meta.region_numbers = region_numbers; let table_id = table_info.ident.table_id; let engine = table_info.meta.engine.clone(); @@ -403,30 +410,28 @@ impl TableMetadataManager { .table_info_manager() .build_create_txn(table_id, &table_info_value)?; - // Creates datanode table key value pairs. - let distribution = region_distribution(®ion_routes)?; - let create_datanode_table_txn = self.datanode_table_manager().build_create_txn( - table_id, - &engine, - ®ion_storage_path, - region_options, - region_wal_options, - distribution, - )?; - - // Creates table route. - let table_route_value = TableRouteValue::new(region_routes); let (create_table_route_txn, on_create_table_route_failure) = self .table_route_manager() .build_create_txn(table_id, &table_route_value)?; - let txn = Txn::merge_all(vec![ + let mut txn = Txn::merge_all(vec![ create_table_name_txn, create_table_info_txn, - create_datanode_table_txn, create_table_route_txn, ]); + if let TableRouteValue::Physical(x) = &table_route_value { + let create_datanode_table_txn = self.datanode_table_manager().build_create_txn( + table_id, + &engine, + ®ion_storage_path, + region_options, + region_wal_options, + region_distribution(&x.region_routes)?, + )?; + txn = txn.merge(create_datanode_table_txn); + } + let r = self.kv_backend.txn(txn).await?; // Checks whether metadata was already created. @@ -711,12 +716,12 @@ impl_table_meta_key!(TableNameKey<'_>, TableInfoKey, DatanodeTableKey); macro_rules! impl_table_meta_value { ($($val_ty: ty), *) => { $( - impl $val_ty { - pub fn try_from_raw_value(raw_value: &[u8]) -> Result { + impl $crate::key::TableMetaValue for $val_ty { + fn try_from_raw_value(raw_value: &[u8]) -> Result { serde_json::from_slice(raw_value).context(SerdeJsonSnafu) } - pub fn try_as_raw_value(&self) -> Result> { + fn try_as_raw_value(&self) -> Result> { serde_json::to_vec(self).context(SerdeJsonSnafu) } } @@ -744,8 +749,7 @@ macro_rules! impl_optional_meta_value { impl_table_meta_value! { TableNameValue, TableInfoValue, - DatanodeTableValue, - TableRouteValue + DatanodeTableValue } impl_optional_meta_value! { @@ -765,6 +769,7 @@ mod tests { use super::datanode_table::DatanodeTableKey; use super::test_utils; use crate::ddl::utils::region_storage_path; + use crate::error::Result; use crate::key::datanode_table::RegionInfo; use crate::key::table_info::TableInfoValue; use crate::key::table_name::TableNameKey; @@ -780,14 +785,14 @@ mod tests { let region_routes = vec![region_route.clone()]; let expected_region_routes = - TableRouteValue::new(vec![region_route.clone(), region_route.clone()]); + TableRouteValue::physical(vec![region_route.clone(), region_route.clone()]); let expected = serde_json::to_vec(&expected_region_routes).unwrap(); // Serialize behaviors: // The inner field will be ignored. 
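The new `TableMetaValue` trait lifts the JSON encode/decode that `impl_table_meta_value!` used to generate as inherent methods, which is what lets `DeserializedValueWithBytes::from_inner_bytes` stay generic over the value type. A minimal sketch of the pattern, using a throwaway value type and plain `serde_json::Error` instead of the crate's error type:

```rust
// Minimal sketch of the TableMetaValue pattern (illustrative only; the real trait
// returns the crate's Result and is implemented through impl_table_meta_value!).
use serde::{Deserialize, Serialize};

trait TableMetaValue: Sized {
    fn try_from_raw_value(raw_value: &[u8]) -> Result<Self, serde_json::Error>;
    fn try_as_raw_value(&self) -> Result<Vec<u8>, serde_json::Error>;
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct DemoValue {
    table_id: u32,
    engine: String,
}

impl TableMetaValue for DemoValue {
    fn try_from_raw_value(raw_value: &[u8]) -> Result<Self, serde_json::Error> {
        serde_json::from_slice(raw_value)
    }
    fn try_as_raw_value(&self) -> Result<Vec<u8>, serde_json::Error> {
        serde_json::to_vec(self)
    }
}

fn main() -> Result<(), serde_json::Error> {
    let value = DemoValue {
        table_id: 42,
        engine: "mito".to_string(),
    };
    // Values are stored as raw JSON bytes and decoded back through the trait,
    // which is what from_inner_bytes relies on instead of calling serde_json directly.
    let bytes = value.try_as_raw_value()?;
    assert_eq!(DemoValue::try_from_raw_value(&bytes)?, value);
    Ok(())
}
```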
let value = DeserializedValueWithBytes { // ignored - inner: TableRouteValue::new(region_routes.clone()), + inner: TableRouteValue::physical(region_routes.clone()), bytes: Bytes::from(expected.clone()), }; @@ -831,6 +836,20 @@ mod tests { test_utils::new_test_table_info(10, region_numbers) } + async fn create_physical_table_metadata( + table_metadata_manager: &TableMetadataManager, + table_info: RawTableInfo, + region_routes: Vec, + ) -> Result<()> { + table_metadata_manager + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) + .await + } + #[tokio::test] async fn test_create_table_metadata() { let mem_kv = Arc::new(MemoryKvBackend::default()); @@ -840,34 +859,33 @@ mod tests { let table_info: RawTableInfo = new_test_table_info(region_routes.iter().map(|r| r.region.id.region_number())).into(); // creates metadata. - table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .unwrap(); + // if metadata was already created, it should be ok. - table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + assert!(create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .is_ok()); + let mut modified_region_routes = region_routes.clone(); modified_region_routes.push(region_route.clone()); // if remote metadata was exists, it should return an error. - assert!(table_metadata_manager - .create_table_metadata( - table_info.clone(), - modified_region_routes, - HashMap::default() - ) - .await - .is_err()); + assert!(create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + modified_region_routes + ) + .await + .is_err()); let (remote_table_info, remote_table_route) = table_metadata_manager .get_full_table_info(10) @@ -894,18 +912,18 @@ mod tests { new_test_table_info(region_routes.iter().map(|r| r.region.id.region_number())).into(); let table_id = table_info.ident.table_id; let datanode_id = 2; - let table_route_value = - DeserializedValueWithBytes::from_inner(TableRouteValue::new(region_routes.clone())); + let table_route_value = DeserializedValueWithBytes::from_inner(TableRouteValue::physical( + region_routes.clone(), + )); // creates metadata. - table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .unwrap(); let table_info_value = DeserializedValueWithBytes::from_inner(TableInfoValue::new(table_info.clone())); @@ -973,14 +991,14 @@ mod tests { new_test_table_info(region_routes.iter().map(|r| r.region.id.region_number())).into(); let table_id = table_info.ident.table_id; // creates metadata. 
- table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .unwrap(); + let new_table_name = "another_name".to_string(); let table_info_value = DeserializedValueWithBytes::from_inner(TableInfoValue::new(table_info.clone())); @@ -1045,14 +1063,14 @@ mod tests { new_test_table_info(region_routes.iter().map(|r| r.region.id.region_number())).into(); let table_id = table_info.ident.table_id; // creates metadata. - table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .unwrap(); + let mut new_table_info = table_info.clone(); new_table_info.name = "hi".to_string(); let current_table_info_value = @@ -1123,17 +1141,18 @@ mod tests { let table_info: RawTableInfo = new_test_table_info(region_routes.iter().map(|r| r.region.id.region_number())).into(); let table_id = table_info.ident.table_id; - let current_table_route_value = - DeserializedValueWithBytes::from_inner(TableRouteValue::new(region_routes.clone())); + let current_table_route_value = DeserializedValueWithBytes::from_inner( + TableRouteValue::physical(region_routes.clone()), + ); + // creates metadata. - table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .unwrap(); table_metadata_manager .update_leader_region_status(table_id, ¤t_table_route_value, |region_route| { @@ -1193,17 +1212,19 @@ mod tests { let engine = table_info.meta.engine.as_str(); let region_storage_path = region_storage_path(&table_info.catalog_name, &table_info.schema_name); - let current_table_route_value = - DeserializedValueWithBytes::from_inner(TableRouteValue::new(region_routes.clone())); + let current_table_route_value = DeserializedValueWithBytes::from_inner( + TableRouteValue::physical(region_routes.clone()), + ); + // creates metadata. 
- table_metadata_manager - .create_table_metadata( - table_info.clone(), - region_routes.clone(), - HashMap::default(), - ) - .await - .unwrap(); + create_physical_table_metadata( + &table_metadata_manager, + table_info.clone(), + region_routes.clone(), + ) + .await + .unwrap(); + assert_datanode_table(&table_metadata_manager, table_id, ®ion_routes).await; let new_region_routes = vec![ new_region_route(1, 1), diff --git a/src/common/meta/src/key/datanode_table.rs b/src/common/meta/src/key/datanode_table.rs index 3ddb00a19ac2..b2e25e014bc8 100644 --- a/src/common/meta/src/key/datanode_table.rs +++ b/src/common/meta/src/key/datanode_table.rs @@ -24,7 +24,8 @@ use table::metadata::TableId; use crate::error::{InvalidTableMetadataSnafu, Result}; use crate::key::{ - RegionDistribution, TableMetaKey, DATANODE_TABLE_KEY_PATTERN, DATANODE_TABLE_KEY_PREFIX, + RegionDistribution, TableMetaKey, TableMetaValue, DATANODE_TABLE_KEY_PATTERN, + DATANODE_TABLE_KEY_PREFIX, }; use crate::kv_backend::txn::{Txn, TxnOp}; use crate::kv_backend::KvBackendRef; diff --git a/src/common/meta/src/key/table_info.rs b/src/common/meta/src/key/table_info.rs index 21f8656451b7..5415a0f1f941 100644 --- a/src/common/meta/src/key/table_info.rs +++ b/src/common/meta/src/key/table_info.rs @@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize}; use table::engine::TableReference; use table::metadata::{RawTableInfo, TableId}; -use super::{DeserializedValueWithBytes, TABLE_INFO_KEY_PREFIX}; +use super::{DeserializedValueWithBytes, TableMetaValue, TABLE_INFO_KEY_PREFIX}; use crate::error::Result; use crate::key::{to_removed_key, TableMetaKey}; use crate::kv_backend::txn::{Compare, CompareOp, Txn, TxnOp, TxnOpResponse}; diff --git a/src/common/meta/src/key/table_name.rs b/src/common/meta/src/key/table_name.rs index cf3690e3ff8d..12d44dace180 100644 --- a/src/common/meta/src/key/table_name.rs +++ b/src/common/meta/src/key/table_name.rs @@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize}; use snafu::OptionExt; use table::metadata::TableId; -use super::{TABLE_NAME_KEY_PATTERN, TABLE_NAME_KEY_PREFIX}; +use super::{TableMetaValue, TABLE_NAME_KEY_PATTERN, TABLE_NAME_KEY_PREFIX}; use crate::error::{Error, InvalidTableMetadataSnafu, Result}; use crate::key::{to_removed_key, TableMetaKey}; use crate::kv_backend::memory::MemoryKvBackend; diff --git a/src/common/meta/src/key/table_region.rs b/src/common/meta/src/key/table_region.rs index 7dabc8f114ef..e51e1a547194 100644 --- a/src/common/meta/src/key/table_region.rs +++ b/src/common/meta/src/key/table_region.rs @@ -71,8 +71,8 @@ impl_table_meta_value! 
{TableRegionValue} #[cfg(test)] mod tests { - use super::*; + use crate::key::TableMetaValue; #[test] fn test_serde() { diff --git a/src/common/meta/src/key/table_route.rs b/src/common/meta/src/key/table_route.rs index 852c17937c34..f799f321e544 100644 --- a/src/common/meta/src/key/table_route.rs +++ b/src/common/meta/src/key/table_route.rs @@ -16,11 +16,12 @@ use std::collections::HashMap; use std::fmt::Display; use serde::{Deserialize, Serialize}; -use store_api::storage::RegionId; +use snafu::ResultExt; +use store_api::storage::{RegionId, RegionNumber}; use table::metadata::TableId; -use super::DeserializedValueWithBytes; -use crate::error::Result; +use super::{DeserializedValueWithBytes, TableMetaValue}; +use crate::error::{Result, SerdeJsonSnafu}; use crate::key::{to_removed_key, RegionDistribution, TableMetaKey, TABLE_ROUTE_PREFIX}; use crate::kv_backend::txn::{Compare, CompareOp, Txn, TxnOp, TxnOpResponse}; use crate::kv_backend::KvBackendRef; @@ -38,6 +39,7 @@ impl TableRouteKey { } #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] pub enum TableRouteValue { Physical(PhysicalTableRouteValue), Logical(LogicalTableRouteValue), @@ -55,11 +57,8 @@ pub struct LogicalTableRouteValue { } impl TableRouteValue { - pub fn new(region_routes: Vec) -> Self { - Self::Physical(PhysicalTableRouteValue { - region_routes, - version: 0, - }) + pub fn physical(region_routes: Vec) -> Self { + Self::Physical(PhysicalTableRouteValue::new(region_routes)) } /// Returns a new version [TableRouteValue] with `region_routes`. @@ -102,6 +101,59 @@ impl TableRouteValue { _ => unreachable!("Mistakenly been treated as a Physical TableRoute: {self:?}"), } } + + pub fn region_numbers(&self) -> Vec { + match self { + TableRouteValue::Physical(x) => x + .region_routes + .iter() + .map(|region_route| region_route.region.id.region_number()) + .collect::>(), + TableRouteValue::Logical(x) => x + .region_ids() + .iter() + .map(|region_id| region_id.region_number()) + .collect::>(), + } + } +} + +impl TableMetaValue for TableRouteValue { + fn try_from_raw_value(raw_value: &[u8]) -> Result { + let r = serde_json::from_slice::(raw_value); + match r { + // Compatible with old TableRouteValue. 
+ Err(e) if e.is_data() => Ok(Self::Physical( + serde_json::from_slice::(raw_value) + .context(SerdeJsonSnafu)?, + )), + Ok(x) => Ok(x), + Err(e) => Err(e).context(SerdeJsonSnafu), + } + } + + fn try_as_raw_value(&self) -> Result> { + serde_json::to_vec(self).context(SerdeJsonSnafu) + } +} + +impl PhysicalTableRouteValue { + pub fn new(region_routes: Vec) -> Self { + Self { + region_routes, + version: 0, + } + } +} + +impl LogicalTableRouteValue { + pub fn physical_table_id(&self) -> TableId { + todo!() + } + + pub fn region_ids(&self) -> Vec { + todo!() + } } impl TableMetaKey for TableRouteKey { @@ -301,3 +353,20 @@ impl TableRouteManager { .transpose() } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_table_route_compatibility() { + let old_raw_v = r#"{"region_routes":[{"region":{"id":1,"name":"r1","partition":null,"attrs":{}},"leader_peer":{"id":2,"addr":"a2"},"follower_peers":[]},{"region":{"id":1,"name":"r1","partition":null,"attrs":{}},"leader_peer":{"id":2,"addr":"a2"},"follower_peers":[]}],"version":0}"#; + let v = TableRouteValue::try_from_raw_value(old_raw_v.as_bytes()).unwrap(); + + let new_raw_v = format!("{:?}", v); + assert_eq!( + new_raw_v, + r#"Physical(PhysicalTableRouteValue { region_routes: [RegionRoute { region: Region { id: 1(0, 1), name: "r1", partition: None, attrs: {} }, leader_peer: Some(Peer { id: 2, addr: "a2" }), follower_peers: [], leader_status: None }, RegionRoute { region: Region { id: 1(0, 1), name: "r1", partition: None, attrs: {} }, leader_peer: Some(Peer { id: 2, addr: "a2" }), follower_peers: [], leader_status: None }], version: 0 })"# + ); + } +} diff --git a/src/common/meta/src/wal.rs b/src/common/meta/src/wal.rs index f80e396a8186..853c6fa5df63 100644 --- a/src/common/meta/src/wal.rs +++ b/src/common/meta/src/wal.rs @@ -17,6 +17,7 @@ pub mod options_allocator; use std::collections::HashMap; +use common_config::wal::StandaloneWalConfig; use serde::{Deserialize, Serialize}; use serde_with::with_prefix; @@ -29,21 +30,38 @@ pub use crate::wal::options_allocator::{ /// Wal config for metasrv. #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] -#[serde(tag = "provider")] +#[serde(tag = "provider", rename_all = "snake_case")] pub enum WalConfig { #[default] - #[serde(rename = "raft_engine")] RaftEngine, - #[serde(rename = "kafka")] Kafka(KafkaConfig), } +impl From for WalConfig { + fn from(value: StandaloneWalConfig) -> Self { + match value { + StandaloneWalConfig::RaftEngine(config) => WalConfig::RaftEngine, + StandaloneWalConfig::Kafka(config) => WalConfig::Kafka(KafkaConfig { + broker_endpoints: config.base.broker_endpoints, + num_topics: config.num_topics, + selector_type: config.selector_type, + topic_name_prefix: config.topic_name_prefix, + num_partitions: config.num_partitions, + replication_factor: config.replication_factor, + create_topic_timeout: config.create_topic_timeout, + backoff: config.base.backoff, + }), + } + } +} + #[cfg(test)] mod tests { use std::time::Duration; + use common_config::wal::kafka::{KafkaBackoffConfig, TopicSelectorType}; + use super::*; - use crate::wal::kafka::topic_selector::SelectorType as KafkaTopicSelectorType; #[test] fn test_serde_wal_config() { @@ -57,7 +75,7 @@ mod tests { // Test serde raft-engine wal config with extra other wal config. 
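The `try_from_raw_value` fallback and the `test_table_route_compatibility` test above implement a small compatibility shim: new `TableRouteValue` payloads carry a `type` tag, while values written before this change are bare `PhysicalTableRouteValue` JSON. A self-contained sketch of the same decoding strategy, with type names local to the example:

```rust
// Sketch of the backward-compatible decode: try the tagged enum first, and on a data
// error (for example a missing `type` tag) fall back to the legacy untagged layout.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct PhysicalRoute {
    region_routes: Vec<u32>,
    version: u64,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
enum RouteValue {
    Physical(PhysicalRoute),
    Logical { physical_table_id: u32 },
}

fn decode(raw: &[u8]) -> serde_json::Result<RouteValue> {
    match serde_json::from_slice::<RouteValue>(raw) {
        Ok(v) => Ok(v),
        // Old payloads have no `type` tag; reinterpret them as the physical variant.
        Err(e) if e.is_data() => Ok(RouteValue::Physical(serde_json::from_slice(raw)?)),
        Err(e) => Err(e),
    }
}

fn main() -> serde_json::Result<()> {
    let old = br#"{"region_routes":[1,2],"version":0}"#;
    let new = br#"{"type":"physical","region_routes":[1,2],"version":0}"#;
    // Both the legacy and the tagged encoding decode to the same physical route.
    assert_eq!(decode(old)?, decode(new)?);
    Ok(())
}
```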
let toml_str = r#" provider = "raft_engine" - broker_endpoints = ["127.0.0.1:9090"] + broker_endpoints = ["127.0.0.1:9092"] num_topics = 32 "#; let wal_config: WalConfig = toml::from_str(toml_str).unwrap(); @@ -66,12 +84,12 @@ mod tests { // Test serde kafka wal config. let toml_str = r#" provider = "kafka" - broker_endpoints = ["127.0.0.1:9090"] + broker_endpoints = ["127.0.0.1:9092"] num_topics = 32 selector_type = "round_robin" topic_name_prefix = "greptimedb_wal_topic" num_partitions = 1 - replication_factor = 3 + replication_factor = 1 create_topic_timeout = "30s" backoff_init = "500ms" backoff_max = "10s" @@ -80,17 +98,19 @@ mod tests { "#; let wal_config: WalConfig = toml::from_str(toml_str).unwrap(); let expected_kafka_config = KafkaConfig { - broker_endpoints: vec!["127.0.0.1:9090".to_string()], + broker_endpoints: vec!["127.0.0.1:9092".to_string()], num_topics: 32, - selector_type: KafkaTopicSelectorType::RoundRobin, + selector_type: TopicSelectorType::RoundRobin, topic_name_prefix: "greptimedb_wal_topic".to_string(), num_partitions: 1, - replication_factor: 3, + replication_factor: 1, create_topic_timeout: Duration::from_secs(30), - backoff_init: Duration::from_millis(500), - backoff_max: Duration::from_secs(10), - backoff_base: 2, - backoff_deadline: Some(Duration::from_secs(60 * 5)), + backoff: KafkaBackoffConfig { + init: Duration::from_millis(500), + max: Duration::from_secs(10), + base: 2, + deadline: Some(Duration::from_secs(60 * 5)), + }, }; assert_eq!(wal_config, WalConfig::Kafka(expected_kafka_config)); } diff --git a/src/common/meta/src/wal/kafka.rs b/src/common/meta/src/wal/kafka.rs index 173a74662d95..0a61b6015dfc 100644 --- a/src/common/meta/src/wal/kafka.rs +++ b/src/common/meta/src/wal/kafka.rs @@ -18,11 +18,12 @@ pub mod topic_selector; use std::time::Duration; +use common_config::wal::kafka::{kafka_backoff, KafkaBackoffConfig, TopicSelectorType}; +use common_config::wal::StandaloneWalConfig; use serde::{Deserialize, Serialize}; pub use crate::wal::kafka::topic::Topic; pub use crate::wal::kafka::topic_manager::TopicManager; -use crate::wal::kafka::topic_selector::SelectorType as TopicSelectorType; /// Configurations for kafka wal. #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] @@ -42,36 +43,25 @@ pub struct KafkaConfig { /// Above which a topic creation operation will be cancelled. #[serde(with = "humantime_serde")] pub create_topic_timeout: Duration, - /// The initial backoff for kafka clients. - #[serde(with = "humantime_serde")] - pub backoff_init: Duration, - /// The maximum backoff for kafka clients. - #[serde(with = "humantime_serde")] - pub backoff_max: Duration, - /// Exponential backoff rate, i.e. next backoff = base * current backoff. - // Sets to u32 type since the `backoff_base` field in the KafkaConfig for datanode is of type u32, - // and we want to unify their types. - pub backoff_base: u32, - /// Stop reconnecting if the total wait time reaches the deadline. - /// If it's None, the reconnecting won't terminate. - #[serde(with = "humantime_serde")] - pub backoff_deadline: Option, + /// The backoff config. 
+ #[serde(flatten, with = "kafka_backoff")] + pub backoff: KafkaBackoffConfig, } impl Default for KafkaConfig { fn default() -> Self { + let broker_endpoints = vec!["127.0.0.1:9092".to_string()]; + let replication_factor = broker_endpoints.len() as i16; + Self { - broker_endpoints: vec!["127.0.0.1:9090".to_string()], + broker_endpoints, num_topics: 64, selector_type: TopicSelectorType::RoundRobin, topic_name_prefix: "greptimedb_wal_topic".to_string(), num_partitions: 1, - replication_factor: 3, + replication_factor, create_topic_timeout: Duration::from_secs(30), - backoff_init: Duration::from_millis(500), - backoff_max: Duration::from_secs(10), - backoff_base: 2, - backoff_deadline: Some(Duration::from_secs(60 * 5)), // 5 mins + backoff: KafkaBackoffConfig::default(), } } } diff --git a/src/common/meta/src/wal/kafka/topic_manager.rs b/src/common/meta/src/wal/kafka/topic_manager.rs index f4b216cbfe1d..867e71b02fa4 100644 --- a/src/common/meta/src/wal/kafka/topic_manager.rs +++ b/src/common/meta/src/wal/kafka/topic_manager.rs @@ -16,10 +16,14 @@ use std::collections::HashSet; use std::sync::Arc; use std::time::Duration; -use common_telemetry::debug; +use common_config::wal::kafka::TopicSelectorType; +use common_telemetry::{debug, error, info}; +use rskafka::client::controller::ControllerClient; +use rskafka::client::error::Error as RsKafkaError; +use rskafka::client::error::ProtocolError::TopicAlreadyExists; use rskafka::client::ClientBuilder; use rskafka::BackoffConfig; -use snafu::{ensure, ResultExt}; +use snafu::{ensure, AsErrorSource, ResultExt}; use crate::error::{ BuildKafkaClientSnafu, BuildKafkaCtrlClientSnafu, CreateKafkaWalTopicSnafu, DecodeJsonSnafu, @@ -28,7 +32,7 @@ use crate::error::{ use crate::kv_backend::KvBackendRef; use crate::rpc::store::PutRequest; use crate::wal::kafka::topic::Topic; -use crate::wal::kafka::topic_selector::{RoundRobinTopicSelector, SelectorType, TopicSelectorRef}; +use crate::wal::kafka::topic_selector::{RoundRobinTopicSelector, TopicSelectorRef}; use crate::wal::kafka::KafkaConfig; const CREATED_TOPICS_KEY: &str = "__created_wal_topics/kafka/"; @@ -51,7 +55,7 @@ impl TopicManager { .collect::>(); let selector = match config.selector_type { - SelectorType::RoundRobin => RoundRobinTopicSelector::with_shuffle(), + TopicSelectorType::RoundRobin => RoundRobinTopicSelector::with_shuffle(), }; Self { @@ -78,7 +82,6 @@ impl TopicManager { .await? .into_iter() .collect::>(); - debug!("Restored {} topics", created_topics.len()); // Creates missing topics. let to_be_created = topics @@ -91,10 +94,10 @@ impl TopicManager { Some(i) }) .collect::>(); + if !to_be_created.is_empty() { self.try_create_topics(topics, &to_be_created).await?; Self::persist_created_topics(topics, &self.kv_backend).await?; - debug!("Persisted {} topics", topics.len()); } Ok(()) } @@ -103,10 +106,10 @@ impl TopicManager { async fn try_create_topics(&self, topics: &[Topic], to_be_created: &[usize]) -> Result<()> { // Builds an kafka controller client for creating topics. 
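The `backoff` field above is flattened with `serde_with`'s `with_prefix!` helper (imported as `kafka_backoff` from `common_config`), which keeps the existing `backoff_init`/`backoff_max`/`backoff_base`/`backoff_deadline` keys valid while sharing one `KafkaBackoffConfig` struct with the datanode. A sketch of the mechanism with a simplified backoff struct; the `backoff_` prefix string is an assumption about the module defined in `common_config`:

```rust
// Sketch of prefix-flattening: the nested struct's fields are exposed at the top level
// of the config with a "backoff_" prefix, so existing key names keep working.
use serde::{Deserialize, Serialize};
use serde_with::with_prefix;

with_prefix!(prefix_backoff "backoff_");

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Backoff {
    init_ms: u64,
    max_ms: u64,
    base: u32,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct DemoKafkaConfig {
    broker_endpoints: Vec<String>,
    #[serde(flatten, with = "prefix_backoff")]
    backoff: Backoff,
}

fn main() {
    let toml_str = r#"
        broker_endpoints = ["127.0.0.1:9092"]
        backoff_init_ms = 500
        backoff_max_ms = 10000
        backoff_base = 2
    "#;
    let config: DemoKafkaConfig = toml::from_str(toml_str).unwrap();
    // The prefixed keys land in the nested struct.
    assert_eq!(config.backoff.init_ms, 500);
    assert_eq!(config.backoff.base, 2);
}
```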
let backoff_config = BackoffConfig { - init_backoff: self.config.backoff_init, - max_backoff: self.config.backoff_max, - base: self.config.backoff_base as f64, - deadline: self.config.backoff_deadline, + init_backoff: self.config.backoff.init, + max_backoff: self.config.backoff.max, + base: self.config.backoff.base as f64, + deadline: self.config.backoff.deadline, }; let client = ClientBuilder::new(self.config.broker_endpoints.clone()) .backoff_config(backoff_config) @@ -118,23 +121,12 @@ impl TopicManager { .controller_client() .context(BuildKafkaCtrlClientSnafu)?; - // Spawns tokio tasks for creating missing topics. + // Try to create missing topics. let tasks = to_be_created .iter() - .map(|i| { - client.create_topic( - topics[*i].clone(), - self.config.num_partitions, - self.config.replication_factor, - self.config.create_topic_timeout.as_millis() as i32, - ) - }) + .map(|i| self.try_create_topic(&topics[*i], &client)) .collect::>(); - // FIXME(niebayes): try to create an already-exist topic would raise an error. - futures::future::try_join_all(tasks) - .await - .context(CreateKafkaWalTopicSnafu) - .map(|_| ()) + futures::future::try_join_all(tasks).await.map(|_| ()) } /// Selects one topic from the topic pool through the topic selector. @@ -149,6 +141,32 @@ impl TopicManager { .collect() } + async fn try_create_topic(&self, topic: &Topic, client: &ControllerClient) -> Result<()> { + match client + .create_topic( + topic.clone(), + self.config.num_partitions, + self.config.replication_factor, + self.config.create_topic_timeout.as_millis() as i32, + ) + .await + { + Ok(_) => { + info!("Successfully created topic {}", topic); + Ok(()) + } + Err(e) => { + if Self::is_topic_already_exist_err(&e) { + info!("The topic {} already exists", topic); + Ok(()) + } else { + error!("Failed to create a topic {}, error {:?}", topic, e); + Err(e).context(CreateKafkaWalTopicSnafu) + } + } + } + } + async fn restore_created_topics(kv_backend: &KvBackendRef) -> Result> { kv_backend .get(CREATED_TOPICS_KEY.as_bytes()) @@ -170,6 +188,16 @@ impl TopicManager { .await .map(|_| ()) } + + fn is_topic_already_exist_err(e: &RsKafkaError) -> bool { + matches!( + e, + &RsKafkaError::ServerError { + protocol_error: TopicAlreadyExists, + .. + } + ) + } } #[cfg(test)] diff --git a/src/common/meta/src/wal/kafka/topic_selector.rs b/src/common/meta/src/wal/kafka/topic_selector.rs index 6764cadcc990..fe7517bfd0b5 100644 --- a/src/common/meta/src/wal/kafka/topic_selector.rs +++ b/src/common/meta/src/wal/kafka/topic_selector.rs @@ -22,14 +22,6 @@ use snafu::ensure; use crate::error::{EmptyTopicPoolSnafu, Result}; use crate::wal::kafka::topic::Topic; -/// The type of the topic selector, i.e. with which strategy to select a topic. -#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq)] -pub enum SelectorType { - #[default] - #[serde(rename = "round_robin")] - RoundRobin, -} - /// Controls topic selection. pub(crate) trait TopicSelector: Send + Sync { /// Selects a topic from the topic pool. 
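The per-topic `try_create_topic` above resolves the earlier FIXME by treating a `TopicAlreadyExists` protocol error as success, so repeated creation attempts no longer fail. A generic sketch of that idempotent-create pattern with a stand-in error type, since pulling in a real Kafka client is out of scope here:

```rust
// Generic sketch: an "already exists" response is treated as success, every other
// error is propagated. The error type stands in for rskafka's ServerError.
#[derive(Debug)]
enum CreateTopicError {
    AlreadyExists,
    Other(String),
}

fn create_topic(existing: &mut Vec<String>, topic: &str) -> Result<(), CreateTopicError> {
    if existing.iter().any(|t| t == topic) {
        return Err(CreateTopicError::AlreadyExists);
    }
    existing.push(topic.to_string());
    Ok(())
}

fn try_create_topic(existing: &mut Vec<String>, topic: &str) -> Result<(), CreateTopicError> {
    match create_topic(existing, topic) {
        Ok(()) => Ok(()),
        // The topic was created by an earlier run; that is fine.
        Err(CreateTopicError::AlreadyExists) => Ok(()),
        Err(e) => Err(e),
    }
}

fn main() {
    let mut existing = vec!["greptimedb_wal_topic_0".to_string()];
    assert!(try_create_topic(&mut existing, "greptimedb_wal_topic_0").is_ok());
    assert!(try_create_topic(&mut existing, "greptimedb_wal_topic_1").is_ok());
    assert_eq!(existing.len(), 2);
}
```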
diff --git a/src/common/runtime/Cargo.toml b/src/common/runtime/Cargo.toml index a4d1460f349a..629da4f4e0d4 100644 --- a/src/common/runtime/Cargo.toml +++ b/src/common/runtime/Cargo.toml @@ -14,6 +14,8 @@ once_cell.workspace = true paste.workspace = true prometheus.workspace = true snafu.workspace = true +tokio-metrics = "0.3" +tokio-metrics-collector = "0.2" tokio-util.workspace = true tokio.workspace = true diff --git a/src/common/runtime/src/runtime.rs b/src/common/runtime/src/runtime.rs index 6a776af25465..0ea041578e10 100644 --- a/src/common/runtime/src/runtime.rs +++ b/src/common/runtime/src/runtime.rs @@ -152,6 +152,7 @@ impl Builder { .build() .context(BuildRuntimeSnafu)?; + let name = self.runtime_name.clone(); let handle = runtime.handle().clone(); let (send_stop, recv_stop) = oneshot::channel(); // Block the runtime to shutdown. @@ -159,8 +160,11 @@ impl Builder { .name(format!("{}-blocker", self.thread_name)) .spawn(move || runtime.block_on(recv_stop)); + #[cfg(tokio_unstable)] + register_collector(name.clone(), &handle); + Ok(Runtime { - name: self.runtime_name.clone(), + name, handle, _dropper: Arc::new(Dropper { close: Some(send_stop), @@ -169,6 +173,14 @@ impl Builder { } } +#[cfg(tokio_unstable)] +pub fn register_collector(name: String, handle: &Handle) { + let name = name.replace("-", "_"); + let monitor = tokio_metrics::RuntimeMonitor::new(handle); + let collector = tokio_metrics_collector::RuntimeCollector::new(monitor, name); + let _ = prometheus::register(Box::new(collector)); +} + fn on_thread_start(thread_name: String) -> impl Fn() + 'static { move || { METRIC_RUNTIME_THREADS_ALIVE @@ -241,6 +253,13 @@ mod tests { assert!(metric_text.contains("runtime_threads_idle{thread_name=\"test_runtime_metric\"}")); assert!(metric_text.contains("runtime_threads_alive{thread_name=\"test_runtime_metric\"}")); + + #[cfg(tokio_unstable)] + { + assert!(metric_text.contains("runtime_0_tokio_budget_forced_yield_count 0")); + assert!(metric_text.contains("runtime_0_tokio_injection_queue_depth 0")); + assert!(metric_text.contains("runtime_0_tokio_workers_count 5")); + } } #[test] diff --git a/src/common/telemetry/src/logging.rs b/src/common/telemetry/src/logging.rs index f825d8f3835b..0b7c3ba1b818 100644 --- a/src/common/telemetry/src/logging.rs +++ b/src/common/telemetry/src/logging.rs @@ -43,6 +43,7 @@ pub struct LoggingOptions { pub enable_otlp_tracing: bool, pub otlp_endpoint: Option, pub tracing_sample_ratio: Option, + pub append_stdout: bool, } impl PartialEq for LoggingOptions { @@ -52,6 +53,7 @@ impl PartialEq for LoggingOptions { && self.enable_otlp_tracing == other.enable_otlp_tracing && self.otlp_endpoint == other.otlp_endpoint && self.tracing_sample_ratio == other.tracing_sample_ratio + && self.append_stdout == other.append_stdout } } @@ -65,6 +67,7 @@ impl Default for LoggingOptions { enable_otlp_tracing: false, otlp_endpoint: None, tracing_sample_ratio: None, + append_stdout: true, } } } @@ -129,10 +132,14 @@ pub fn init_global_logging( // Enable log compatible layer to convert log record to tracing span. LogTracer::init().expect("log tracer must be valid"); - // Stdout layer. 
- let (stdout_writer, stdout_guard) = tracing_appender::non_blocking(std::io::stdout()); - let stdout_logging_layer = Layer::new().with_writer(stdout_writer); - guards.push(stdout_guard); + let stdout_logging_layer = if opts.append_stdout { + let (stdout_writer, stdout_guard) = tracing_appender::non_blocking(std::io::stdout()); + guards.push(stdout_guard); + + Some(Layer::new().with_writer(stdout_writer)) + } else { + None + }; // JSON log layer. let rolling_appender = RollingFileAppender::new(Rotation::HOURLY, dir, app_name); @@ -184,7 +191,7 @@ pub fn init_global_logging( None }; - let stdout_logging_layer = stdout_logging_layer.with_filter(filter.clone()); + let stdout_logging_layer = stdout_logging_layer.map(|x| x.with_filter(filter.clone())); let file_logging_layer = file_logging_layer.with_filter(filter); diff --git a/src/frontend/src/instance/standalone.rs b/src/frontend/src/instance/standalone.rs index d46ee3d45886..21496e28edc5 100644 --- a/src/frontend/src/instance/standalone.rs +++ b/src/frontend/src/instance/standalone.rs @@ -18,10 +18,14 @@ use std::sync::Arc; use api::v1::region::{QueryRequest, RegionRequest, RegionResponse}; use async_trait::async_trait; use client::region::check_response_header; +use common_catalog::consts::METRIC_ENGINE; use common_error::ext::BoxedError; use common_meta::datanode_manager::{AffectedRows, Datanode, DatanodeManager, DatanodeRef}; use common_meta::ddl::{TableMetadata, TableMetadataAllocator, TableMetadataAllocatorContext}; use common_meta::error::{self as meta_error, Result as MetaResult, UnsupportedSnafu}; +use common_meta::key::table_route::{ + LogicalTableRouteValue, PhysicalTableRouteValue, TableRouteValue, +}; use common_meta::peer::Peer; use common_meta::rpc::ddl::CreateTableTask; use common_meta::rpc::router::{Region, RegionRoute}; @@ -34,7 +38,7 @@ use common_telemetry::{debug, info, tracing}; use datanode::region_server::RegionServer; use servers::grpc::region_server::RegionServerHandler; use snafu::{ensure, OptionExt, ResultExt}; -use store_api::storage::{RegionId, TableId}; +use store_api::storage::{RegionId, RegionNumber, TableId}; use crate::error::{InvalidRegionRequestSnafu, InvokeRegionServerSnafu, Result}; @@ -151,17 +155,29 @@ impl StandaloneTableMetadataAllocator { }; Ok(table_id) } -} -#[async_trait] -impl TableMetadataAllocator for StandaloneTableMetadataAllocator { - async fn create( + fn create_wal_options( &self, - _ctx: &TableMetadataAllocatorContext, - task: &CreateTableTask, - ) -> MetaResult { - let table_id = self.allocate_table_id(task).await?; + table_route: &TableRouteValue, + ) -> MetaResult> { + match table_route { + TableRouteValue::Physical(x) => { + let region_numbers = x + .region_routes + .iter() + .map(|route| route.region.id.region_number()) + .collect(); + allocate_region_wal_options(region_numbers, &self.wal_options_allocator) + } + TableRouteValue::Logical(_) => Ok(HashMap::new()), + } + } +} +fn create_table_route(table_id: TableId, task: &CreateTableTask) -> TableRouteValue { + if task.create_table.engine == METRIC_ENGINE { + TableRouteValue::Logical(LogicalTableRouteValue {}) + } else { let region_routes = task .partitions .iter() @@ -182,13 +198,22 @@ impl TableMetadataAllocator for StandaloneTableMetadataAllocator { } }) .collect::>(); + TableRouteValue::Physical(PhysicalTableRouteValue::new(region_routes)) + } +} - let region_numbers = region_routes - .iter() - .map(|route| route.region.id.region_number()) - .collect(); - let region_wal_options = - allocate_region_wal_options(region_numbers, 
&self.wal_options_allocator)?; +#[async_trait] +impl TableMetadataAllocator for StandaloneTableMetadataAllocator { + async fn create( + &self, + _ctx: &TableMetadataAllocatorContext, + task: &CreateTableTask, + ) -> MetaResult { + let table_id = self.allocate_table_id(task).await?; + + let table_route = create_table_route(table_id, task); + + let region_wal_options = self.create_wal_options(&table_route)?; debug!( "Allocated region wal options {:?} for table {}", @@ -197,8 +222,8 @@ impl TableMetadataAllocator for StandaloneTableMetadataAllocator { Ok(TableMetadata { table_id, - region_routes, - region_wal_options: HashMap::default(), + table_route, + region_wal_options, }) } } diff --git a/src/log-store/src/error.rs b/src/log-store/src/error.rs index 1ee344046adc..7f475e2076a8 100644 --- a/src/log-store/src/error.rs +++ b/src/log-store/src/error.rs @@ -20,6 +20,8 @@ use common_macro::stack_trace_debug; use common_runtime::error::Error as RuntimeError; use snafu::{Location, Snafu}; +use crate::kafka::NamespaceImpl as KafkaNamespace; + #[derive(Snafu)] #[snafu(visibility(pub))] #[stack_trace_debug] @@ -152,16 +154,17 @@ pub enum Error { error: rskafka::client::producer::Error, }, - #[snafu(display( - "Failed to read a record from Kafka, topic: {}, region_id: {}, offset: {}", - topic, - region_id, - offset, - ))] + #[snafu(display("Failed to read a record from Kafka, ns: {}", ns))] ConsumeRecord { - topic: String, - region_id: u64, - offset: i64, + ns: KafkaNamespace, + location: Location, + #[snafu(source)] + error: rskafka::client::error::Error, + }, + + #[snafu(display("Failed to get the latest offset, ns: {}", ns))] + GetOffset { + ns: KafkaNamespace, location: Location, #[snafu(source)] error: rskafka::client::error::Error, diff --git a/src/log-store/src/kafka.rs b/src/log-store/src/kafka.rs index 3dba4a352b3c..a08afc508eac 100644 --- a/src/log-store/src/kafka.rs +++ b/src/log-store/src/kafka.rs @@ -17,6 +17,8 @@ pub mod log_store; mod offset; mod record_utils; +use std::fmt::Display; + use common_meta::wal::KafkaWalTopic as Topic; use serde::{Deserialize, Serialize}; use store_api::logstore::entry::{Entry, Id as EntryId}; @@ -37,6 +39,12 @@ impl Namespace for NamespaceImpl { } } +impl Display for NamespaceImpl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}/{}", self.topic, self.region_id) + } +} + /// Kafka Entry implementation. #[derive(Debug, PartialEq, Clone)] pub struct EntryImpl { @@ -64,3 +72,15 @@ impl Entry for EntryImpl { self.ns.clone() } } + +impl Display for EntryImpl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Entry (ns: {}, id: {}, data_len: {})", + self.ns, + self.id, + self.data.len() + ) + } +} diff --git a/src/log-store/src/kafka/client_manager.rs b/src/log-store/src/kafka/client_manager.rs index 9aa27bf1b3fd..e272840201bb 100644 --- a/src/log-store/src/kafka/client_manager.rs +++ b/src/log-store/src/kafka/client_manager.rs @@ -75,10 +75,10 @@ impl ClientManager { pub(crate) async fn try_new(config: &KafkaConfig) -> Result { // Sets backoff config for the top-level kafka client and all clients constructed by it. 
let backoff_config = BackoffConfig { - init_backoff: config.backoff_init, - max_backoff: config.backoff_max, - base: config.backoff_base as f64, - deadline: config.backoff_deadline, + init_backoff: config.backoff.init, + max_backoff: config.backoff.max, + base: config.backoff.base as f64, + deadline: config.backoff.deadline, }; let client = ClientBuilder::new(config.broker_endpoints.clone()) .backoff_config(backoff_config) diff --git a/src/log-store/src/kafka/log_store.rs b/src/log-store/src/kafka/log_store.rs index 4ff054712ff3..73b0fe1de2a9 100644 --- a/src/log-store/src/kafka/log_store.rs +++ b/src/log-store/src/kafka/log_store.rs @@ -16,17 +16,20 @@ use std::collections::HashMap; use std::sync::Arc; use common_config::wal::{KafkaConfig, WalOptions}; +use common_telemetry::{debug, warn}; use futures_util::StreamExt; use rskafka::client::consumer::{StartOffset, StreamConsumerBuilder}; +use rskafka::client::partition::OffsetAt; +use snafu::ResultExt; use store_api::logstore::entry::Id as EntryId; use store_api::logstore::entry_stream::SendableEntryStream; use store_api::logstore::namespace::Id as NamespaceId; use store_api::logstore::{AppendBatchResponse, AppendResponse, LogStore}; -use crate::error::{Error, Result}; +use crate::error::{ConsumeRecordSnafu, Error, GetOffsetSnafu, Result}; use crate::kafka::client_manager::{ClientManager, ClientManagerRef}; use crate::kafka::offset::Offset; -use crate::kafka::record_utils::{handle_consume_result, RecordProducer}; +use crate::kafka::record_utils::{decode_from_record, RecordProducer}; use crate::kafka::{EntryImpl, NamespaceImpl}; /// A log store backed by Kafka. @@ -82,6 +85,8 @@ impl LogStore for KafkaLogStore { /// Appends a batch of entries and returns a response containing a map where the key is a region id /// while the value is the id of the last successfully written entry of the region. async fn append_batch(&self, entries: Vec) -> Result { + debug!("LogStore handles append_batch with entries {:?}", entries); + if entries.is_empty() { return Ok(AppendBatchResponse::default()); } @@ -97,6 +102,7 @@ impl LogStore for KafkaLogStore { // Builds a record from entries belong to a region and produces them to kafka server. let region_ids = producers.keys().cloned().collect::>(); + let tasks = producers .into_values() .map(|producer| producer.produce(&self.client_manager)) @@ -108,6 +114,8 @@ impl LogStore for KafkaLogStore { .into_iter() .map(TryInto::try_into) .collect::>>()?; + debug!("The entries are appended at offsets {:?}", entry_ids); + Ok(AppendBatchResponse { last_entry_ids: region_ids.into_iter().zip(entry_ids).collect(), }) @@ -131,15 +139,72 @@ impl LogStore for KafkaLogStore { .raw_client .clone(); - // Reads the entries starting from exactly the specified offset. - let offset = Offset::try_from(entry_id)?.0; - let mut stream_consumer = StreamConsumerBuilder::new(client, StartOffset::At(offset)) + // Gets the offset of the latest record in the topic. Actually, it's the latest record of the single partition in the topic. + // The read operation terminates when this record is consumed. + // Warning: the `get_offset` returns the end offset of the latest record. For our usage, it should be decremented. + // See: https://kafka.apache.org/36/javadoc/org/apache/kafka/clients/consumer/KafkaConsumer.html#endOffsets(java.util.Collection) + let end_offset = client + .get_offset(OffsetAt::Latest) + .await + .context(GetOffsetSnafu { ns: ns.clone() })? + - 1; + // Reads entries with offsets in the range [start_offset, end_offset). 
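The decrement below matters because, as the linked Kafka documentation describes, the reported end offset is the position the next record will receive rather than the offset of the last existing record. A tiny sketch of that arithmetic, including the empty-partition case:

```rust
// Illustration of the offset arithmetic (spelled out here as an assumption about Kafka
// semantics, not new code in the patch): the "latest" offset is where the next record
// will land, so the last readable record sits at latest - 1, and an empty partition
// (latest == 0) has nothing to read at all.
fn last_readable_offset(latest: i64) -> Option<i64> {
    (latest > 0).then_some(latest - 1)
}

fn main() {
    assert_eq!(last_readable_offset(0), None); // empty partition
    assert_eq!(last_readable_offset(5), Some(4)); // records at offsets 0..=4
}
```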
+ let start_offset = Offset::try_from(entry_id)?.0; + + // Abort if there're no new entries. + // FIXME(niebayes): how come this case happens? + if start_offset > end_offset { + warn!( + "No new entries for ns {} in range [{}, {})", + ns, start_offset, end_offset + ); + return Ok(futures_util::stream::empty().boxed()); + } + + let mut stream_consumer = StreamConsumerBuilder::new(client, StartOffset::At(start_offset)) .with_max_batch_size(self.config.max_batch_size.as_bytes() as i32) - .with_max_wait_ms(self.config.max_wait_time.as_millis() as i32) + .with_max_wait_ms(self.config.produce_record_timeout.as_millis() as i32) .build(); + + debug!( + "Built a stream consumer for ns {} to consume entries in range [{}, {})", + ns, start_offset, end_offset + ); + + let ns_clone = ns.clone(); let stream = async_stream::stream!({ while let Some(consume_result) = stream_consumer.next().await { - yield handle_consume_result(consume_result, &topic, region_id, offset); + // Each next will prdoce a `RecordAndOffset` and a high watermark offset. + // The `RecordAndOffset` contains the record data and its start offset. + // The high watermark offset is the end offset of the latest record in the partition. + let (record, high_watermark) = consume_result.context(ConsumeRecordSnafu { + ns: ns_clone.clone(), + })?; + let record_offset = record.offset; + debug!( + "Read a record at offset {} for ns {}, high watermark: {}", + record_offset, ns_clone, high_watermark + ); + + let entries = decode_from_record(record.record)?; + + // Filters entries by region id. + if let Some(entry) = entries.first() + && entry.ns.region_id == region_id + { + yield Ok(entries); + } else { + yield Ok(vec![]); + } + + // Terminates the stream if the entry with the end offset was read. + if record_offset >= end_offset { + debug!( + "Stream consumer for ns {} terminates at offset {}", + ns_clone, record_offset + ); + break; + } } }); Ok(Box::pin(stream)) diff --git a/src/log-store/src/kafka/record_utils.rs b/src/log-store/src/kafka/record_utils.rs index 37a66acfbdb3..3707b873f3e3 100644 --- a/src/log-store/src/kafka/record_utils.rs +++ b/src/log-store/src/kafka/record_utils.rs @@ -12,21 +12,18 @@ // See the License for the specific language governing permissions and // limitations under the License. -use common_config::wal::KafkaWalTopic as Topic; -use rskafka::record::{Record, RecordAndOffset}; +use rskafka::record::Record; use serde::{Deserialize, Serialize}; use snafu::{ensure, OptionExt, ResultExt}; use crate::error::{ - ConsumeRecordSnafu, DecodeMetaSnafu, EmptyEntriesSnafu, EncodeMetaSnafu, GetClientSnafu, - MissingKeySnafu, MissingValueSnafu, ProduceRecordSnafu, Result, + DecodeMetaSnafu, EmptyEntriesSnafu, EncodeMetaSnafu, GetClientSnafu, MissingKeySnafu, + MissingValueSnafu, ProduceRecordSnafu, Result, }; use crate::kafka::client_manager::ClientManagerRef; use crate::kafka::offset::Offset; use crate::kafka::{EntryId, EntryImpl, NamespaceImpl}; -type ConsumeResult = std::result::Result<(RecordAndOffset, i64), rskafka::client::error::Error>; - /// Record metadata which will be serialized/deserialized to/from the `key` of a Record. 
#[derive(Debug, Serialize, Deserialize, PartialEq)] struct RecordMeta { @@ -125,7 +122,7 @@ fn encode_to_record(ns: NamespaceImpl, entries: Vec) -> Result Result> { +pub(crate) fn decode_from_record(record: Record) -> Result> { let key = record.key.context(MissingKeySnafu)?; let value = record.value.context(MissingValueSnafu)?; let meta: RecordMeta = serde_json::from_slice(&key).context(DecodeMetaSnafu)?; @@ -144,34 +141,6 @@ fn decode_from_record(record: Record) -> Result> { Ok(entries) } -/// Handles the result of a consume operation on a kafka topic. -pub(crate) fn handle_consume_result( - result: ConsumeResult, - topic: &Topic, - region_id: u64, - offset: i64, -) -> Result> { - match result { - Ok((record_and_offset, _)) => { - // Only produces entries belong to the region with the given region id. - // Since a record only contains entries from a single region, it suffices to check the first entry only. - let entries = decode_from_record(record_and_offset.record)?; - if let Some(entry) = entries.first() - && entry.id == region_id - { - Ok(entries) - } else { - Ok(vec![]) - } - } - Err(e) => Err(e).context(ConsumeRecordSnafu { - topic, - region_id, - offset, - }), - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/src/meta-srv/src/handler/region_lease_handler.rs b/src/meta-srv/src/handler/region_lease_handler.rs index d3433179fea0..eb792cf9ecd2 100644 --- a/src/meta-srv/src/handler/region_lease_handler.rs +++ b/src/meta-srv/src/handler/region_lease_handler.rs @@ -104,6 +104,7 @@ mod test { use std::sync::Arc; use common_meta::distributed_time_constants; + use common_meta::key::table_route::TableRouteValue; use common_meta::key::test_utils::new_test_table_info; use common_meta::key::TableMetadataManager; use common_meta::kv_backend::memory::MemoryKvBackend; @@ -161,7 +162,11 @@ mod test { let table_metadata_manager = keeper.table_metadata_manager(); table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) .await .unwrap(); @@ -303,7 +308,11 @@ mod test { let table_metadata_manager = keeper.table_metadata_manager(); table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) .await .unwrap(); diff --git a/src/meta-srv/src/procedure/region_migration/migration_start.rs b/src/meta-srv/src/procedure/region_migration/migration_start.rs index cd9b5bad5a5d..fa84a1a6dd5e 100644 --- a/src/meta-srv/src/procedure/region_migration/migration_start.rs +++ b/src/meta-srv/src/procedure/region_migration/migration_start.rs @@ -137,7 +137,6 @@ impl RegionMigrationStart { #[cfg(test)] mod tests { use std::assert_matches::assert_matches; - use std::collections::HashMap; use common_meta::key::test_utils::new_test_table_info; use common_meta::peer::Peer; @@ -187,10 +186,8 @@ mod tests { ..Default::default() }; - env.table_metadata_manager() - .create_table_metadata(table_info, vec![region_route], HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, vec![region_route]) + .await; let err = state .retrieve_region_route(&mut ctx, RegionId::new(1024, 3)) @@ -221,10 +218,8 @@ mod tests { ..Default::default() }]; - env.table_metadata_manager() - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, 
region_routes) + .await; let (next, _) = state.next(&mut ctx).await.unwrap(); @@ -254,10 +249,8 @@ mod tests { ..Default::default() }]; - env.table_metadata_manager() - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let (next, _) = state.next(&mut ctx).await.unwrap(); @@ -281,10 +274,8 @@ mod tests { ..Default::default() }]; - env.table_metadata_manager() - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let (next, _) = state.next(&mut ctx).await.unwrap(); diff --git a/src/meta-srv/src/procedure/region_migration/open_candidate_region.rs b/src/meta-srv/src/procedure/region_migration/open_candidate_region.rs index dc6ebb2f4df9..74b904ce0105 100644 --- a/src/meta-srv/src/procedure/region_migration/open_candidate_region.rs +++ b/src/meta-srv/src/procedure/region_migration/open_candidate_region.rs @@ -187,6 +187,7 @@ mod tests { use std::assert_matches::assert_matches; use common_catalog::consts::MITO2_ENGINE; + use common_meta::key::table_route::TableRouteValue; use common_meta::key::test_utils::new_test_table_info; use common_meta::peer::Peer; use common_meta::rpc::router::{Region, RegionRoute}; @@ -409,7 +410,11 @@ mod tests { }]; env.table_metadata_manager() - .create_table_metadata(table_info, region_routes, HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) .await .unwrap(); diff --git a/src/meta-srv/src/procedure/region_migration/test_util.rs b/src/meta-srv/src/procedure/region_migration/test_util.rs index 6496c18ee516..b34b1e655f4c 100644 --- a/src/meta-srv/src/procedure/region_migration/test_util.rs +++ b/src/meta-srv/src/procedure/region_migration/test_util.rs @@ -22,6 +22,7 @@ use api::v1::meta::{HeartbeatResponse, MailboxMessage, RequestHeader}; use common_meta::instruction::{ DowngradeRegionReply, InstructionReply, SimpleReply, UpgradeRegionReply, }; +use common_meta::key::table_route::TableRouteValue; use common_meta::key::{TableMetadataManager, TableMetadataManagerRef}; use common_meta::kv_backend::memory::MemoryKvBackend; use common_meta::peer::Peer; @@ -144,6 +145,22 @@ impl TestingEnv { provider: Arc::new(MockContextProvider::default()), } } + + // Creates a table metadata with the physical table route. + pub async fn create_physical_table_metadata( + &self, + table_info: RawTableInfo, + region_routes: Vec, + ) { + self.table_metadata_manager + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) + .await + .unwrap(); + } } /// Generates a [InstructionReply::OpenRegion] reply. 
@@ -369,7 +386,11 @@ impl ProcedureMigrationTestSuite { ) { self.env .table_metadata_manager() - .create_table_metadata(table_info, region_routes, HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) .await .unwrap(); } diff --git a/src/meta-srv/src/procedure/region_migration/update_metadata/downgrade_leader_region.rs b/src/meta-srv/src/procedure/region_migration/update_metadata/downgrade_leader_region.rs index 05dbb1935f19..5a76d34819e7 100644 --- a/src/meta-srv/src/procedure/region_migration/update_metadata/downgrade_leader_region.rs +++ b/src/meta-srv/src/procedure/region_migration/update_metadata/downgrade_leader_region.rs @@ -74,7 +74,6 @@ impl UpdateMetadata { #[cfg(test)] mod tests { use std::assert_matches::assert_matches; - use std::collections::HashMap; use common_meta::key::test_utils::new_test_table_info; use common_meta::peer::Peer; @@ -136,12 +135,10 @@ mod tests { }, ]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); let original_table_route = table_metadata_manager .table_route_manager() .get(table_id) @@ -190,11 +187,10 @@ mod tests { ..Default::default() }]; + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); let (next, _) = state.next(&mut ctx).await.unwrap(); @@ -233,11 +229,10 @@ mod tests { ..Default::default() }]; + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); let (next, _) = state.next(&mut ctx).await.unwrap(); diff --git a/src/meta-srv/src/procedure/region_migration/update_metadata/rollback_downgraded_region.rs b/src/meta-srv/src/procedure/region_migration/update_metadata/rollback_downgraded_region.rs index e7fa73dedf8d..7281737752a4 100644 --- a/src/meta-srv/src/procedure/region_migration/update_metadata/rollback_downgraded_region.rs +++ b/src/meta-srv/src/procedure/region_migration/update_metadata/rollback_downgraded_region.rs @@ -59,7 +59,6 @@ impl UpdateMetadata { #[cfg(test)] mod tests { use std::assert_matches::assert_matches; - use std::collections::HashMap; use common_meta::key::test_utils::new_test_table_info; use common_meta::peer::Peer; @@ -128,12 +127,10 @@ mod tests { region_routes }; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); let old_table_route = table_metadata_manager .table_route_manager() .get(table_id) @@ -213,11 +210,10 @@ mod tests { region_routes }; + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); let (next, _) = state.next(&mut 
ctx).await.unwrap(); diff --git a/src/meta-srv/src/procedure/region_migration/update_metadata/upgrade_candidate_region.rs b/src/meta-srv/src/procedure/region_migration/update_metadata/upgrade_candidate_region.rs index bb86280ba000..597d9afe9a7b 100644 --- a/src/meta-srv/src/procedure/region_migration/update_metadata/upgrade_candidate_region.rs +++ b/src/meta-srv/src/procedure/region_migration/update_metadata/upgrade_candidate_region.rs @@ -176,7 +176,6 @@ impl UpdateMetadata { #[cfg(test)] mod tests { use std::assert_matches::assert_matches; - use std::collections::HashMap; use common_meta::key::test_utils::new_test_table_info; use common_meta::peer::Peer; @@ -225,11 +224,8 @@ mod tests { ..Default::default() }]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let err = state .build_upgrade_candidate_region_metadata(&mut ctx) @@ -254,11 +250,8 @@ mod tests { ..Default::default() }]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let err = state .build_upgrade_candidate_region_metadata(&mut ctx) @@ -285,11 +278,8 @@ mod tests { leader_status: Some(RegionStatus::Downgraded), }]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let new_region_routes = state .build_upgrade_candidate_region_metadata(&mut ctx) @@ -326,12 +316,10 @@ mod tests { }, ]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); let original_table_route = table_metadata_manager .table_route_manager() .get(table_id) @@ -385,11 +373,8 @@ mod tests { leader_status: None, }]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let updated = state.check_metadata_updated(&mut ctx).await.unwrap(); assert!(!updated); @@ -411,11 +396,8 @@ mod tests { leader_status: None, }]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let updated = state.check_metadata_updated(&mut ctx).await.unwrap(); assert!(updated); @@ -437,11 +419,8 @@ mod tests { leader_status: Some(RegionStatus::Downgraded), }]; - let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); + env.create_physical_table_metadata(table_info, region_routes) + .await; let err = state.check_metadata_updated(&mut ctx).await.unwrap_err(); assert_matches!(err, Error::Unexpected { .. 
}); @@ -470,11 +449,10 @@ mod tests { .unwrap(); ctx.volatile_ctx.opening_region_guard = Some(guard); + env.create_physical_table_metadata(table_info, region_routes) + .await; + let table_metadata_manager = env.table_metadata_manager(); - table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) - .await - .unwrap(); let (next, _) = state.next(&mut ctx).await.unwrap(); diff --git a/src/meta-srv/src/procedure/tests.rs b/src/meta-srv/src/procedure/tests.rs index e7b8a681138c..9ffad3aa6cf9 100644 --- a/src/meta-srv/src/procedure/tests.rs +++ b/src/meta-srv/src/procedure/tests.rs @@ -100,12 +100,12 @@ fn test_region_request_builder() { let procedure = CreateTableProcedure::new( 1, create_table_task(), - test_data::new_region_routes(), + TableRouteValue::physical(test_data::new_region_routes()), HashMap::default(), test_data::new_ddl_context(Arc::new(DatanodeClients::default())), ); - let template = procedure.new_region_request_builder().unwrap(); + let template = procedure.new_region_request_builder(None).unwrap(); let expected = PbCreateRegionRequest { region_id: 0, @@ -191,7 +191,7 @@ async fn test_on_datanode_create_regions() { let mut procedure = CreateTableProcedure::new( 1, create_table_task(), - region_routes, + TableRouteValue::physical(region_routes), HashMap::default(), test_data::new_ddl_context(datanode_manager), ); @@ -247,7 +247,7 @@ async fn test_on_datanode_drop_regions() { let procedure = DropTableProcedure::new( 1, drop_table_task, - DeserializedValueWithBytes::from_inner(TableRouteValue::new(region_routes)), + DeserializedValueWithBytes::from_inner(TableRouteValue::physical(region_routes)), DeserializedValueWithBytes::from_inner(TableInfoValue::new(test_data::new_table_info())), test_data::new_ddl_context(datanode_manager), ); @@ -373,7 +373,7 @@ async fn test_submit_alter_region_requests() { .table_metadata_manager .create_table_metadata( table_info.clone(), - region_routes.clone(), + TableRouteValue::physical(region_routes), HashMap::default(), ) .await diff --git a/src/meta-srv/src/region/lease_keeper.rs b/src/meta-srv/src/region/lease_keeper.rs index b555d2e780dd..cbd2451896b1 100644 --- a/src/meta-srv/src/region/lease_keeper.rs +++ b/src/meta-srv/src/region/lease_keeper.rs @@ -188,6 +188,7 @@ mod tests { use std::collections::{HashMap, HashSet}; use std::sync::Arc; + use common_meta::key::table_route::TableRouteValue; use common_meta::key::test_utils::new_test_table_info; use common_meta::key::TableMetadataManager; use common_meta::kv_backend::memory::MemoryKvBackend; @@ -291,7 +292,11 @@ mod tests { let keeper = new_test_keeper(); let table_metadata_manager = keeper.table_metadata_manager(); table_metadata_manager - .create_table_metadata(table_info, vec![region_route.clone()], HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(vec![region_route]), + HashMap::default(), + ) .await .unwrap(); @@ -378,7 +383,11 @@ mod tests { let keeper = new_test_keeper(); let table_metadata_manager = keeper.table_metadata_manager(); table_metadata_manager - .create_table_metadata(table_info, vec![region_route.clone()], HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(vec![region_route]), + HashMap::default(), + ) .await .unwrap(); diff --git a/src/meta-srv/src/selector.rs b/src/meta-srv/src/selector.rs index 44ce0b2c8cb4..4c3a91caef2b 100644 --- a/src/meta-srv/src/selector.rs +++ b/src/meta-srv/src/selector.rs @@ -56,6 +56,7 @@ impl Default for SelectorOptions { } 
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(try_from = "String")] pub enum SelectorType { #[default] LoadBased, @@ -77,6 +78,14 @@ impl TryFrom<&str> for SelectorType { } } +impl TryFrom<String> for SelectorType { + type Error = error::Error; + + fn try_from(value: String) -> Result<Self, Self::Error> { + SelectorType::try_from(value.as_str()) + } +} + #[cfg(test)] mod tests { use super::SelectorType; diff --git a/src/meta-srv/src/table_meta_alloc.rs b/src/meta-srv/src/table_meta_alloc.rs index 12fac723b430..21e5778209f7 100644 --- a/src/meta-srv/src/table_meta_alloc.rs +++ b/src/meta-srv/src/table_meta_alloc.rs @@ -12,17 +12,23 @@ // See the License for the specific language governing permissions and // limitations under the License. -use common_catalog::format_full_table_name; +use std::collections::HashMap; + +use common_catalog::consts::METRIC_ENGINE; use common_error::ext::BoxedError; use common_meta::ddl::{TableMetadata, TableMetadataAllocator, TableMetadataAllocatorContext}; -use common_meta::error::{self as meta_error, Result as MetaResult}; +use common_meta::error::{ExternalSnafu, Result as MetaResult}; +use common_meta::key::table_route::{ + LogicalTableRouteValue, PhysicalTableRouteValue, TableRouteValue, +}; use common_meta::rpc::ddl::CreateTableTask; use common_meta::rpc::router::{Region, RegionRoute}; use common_meta::sequence::SequenceRef; use common_meta::wal::{allocate_region_wal_options, WalOptionsAllocatorRef}; -use common_telemetry::{debug, warn}; +use common_meta::ClusterId; +use common_telemetry::debug; use snafu::{ensure, ResultExt}; -use store_api::storage::{RegionId, TableId, MAX_REGION_SEQ}; +use store_api::storage::{RegionId, RegionNumber, TableId, MAX_REGION_SEQ}; use crate::error::{self, Result, TooManyPartitionsSnafu}; use crate::metasrv::{SelectorContext, SelectorRef}; @@ -49,6 +55,83 @@ impl MetaSrvTableMetadataAllocator { wal_options_allocator, } } + + async fn create_table_route( + &self, + cluster_id: ClusterId, + table_id: TableId, + task: &CreateTableTask, + ) -> Result<TableRouteValue> { + let table_route = if task.create_table.engine == METRIC_ENGINE { + TableRouteValue::Logical(LogicalTableRouteValue {}) + } else { + let regions = task.partitions.len(); + + ensure!(regions <= MAX_REGION_SEQ as usize, TooManyPartitionsSnafu); + + let mut peers = self + .selector + .select( + cluster_id, + &self.ctx, + SelectorOptions { + min_required_items: regions, + allow_duplication: true, + }, + ) + .await?; + + ensure!( + peers.len() >= regions, + error::NoEnoughAvailableDatanodeSnafu { + required: regions, + available: peers.len(), + } + ); + + peers.truncate(regions); + + let region_routes = task + .partitions + .iter() + .enumerate() + .map(|(i, partition)| { + let region = Region { + id: RegionId::new(table_id, i as RegionNumber), + partition: Some(partition.clone().into()), + ..Default::default() + }; + + let peer = peers[i % peers.len()].clone(); + + RegionRoute { + region, + leader_peer: Some(peer.into()), + ..Default::default() + } + }) + .collect::<Vec<_>>(); + TableRouteValue::Physical(PhysicalTableRouteValue::new(region_routes)) + }; + Ok(table_route) + } + + fn create_wal_options( + &self, + table_route: &TableRouteValue, + ) -> MetaResult<HashMap<RegionNumber, String>> { + match table_route { + TableRouteValue::Physical(x) => { + let region_numbers = x + .region_routes + .iter() + .map(|route| route.region.id.region_number()) + .collect(); + allocate_region_wal_options(region_numbers, &self.wal_options_allocator) + } + TableRouteValue::Logical(_) => Ok(HashMap::new()), + } + } }
#[async_trait::async_trait] @@ -58,23 +141,15 @@ impl TableMetadataAllocator for MetaSrvTableMetadataAllocator { ctx: &TableMetadataAllocatorContext, task: &CreateTableTask, ) -> MetaResult<TableMetadata> { - let (table_id, region_routes) = handle_create_region_routes( - ctx.cluster_id, - task, - &self.ctx, - &self.selector, - &self.table_id_sequence, - ) - .await - .map_err(BoxedError::new) - .context(meta_error::ExternalSnafu)?; - - let region_numbers = region_routes - .iter() - .map(|route| route.region.id.region_number()) - .collect(); - let region_wal_options = - allocate_region_wal_options(region_numbers, &self.wal_options_allocator)?; + let table_id = self.table_id_sequence.next().await? as TableId; + + let table_route = self + .create_table_route(ctx.cluster_id, table_id, task) + .await + .map_err(BoxedError::new) + .context(ExternalSnafu)?; + + let region_wal_options = self.create_wal_options(&table_route)?; debug!( "Allocated region wal options {:?} for table {}", @@ -83,84 +158,8 @@ impl TableMetadataAllocator for MetaSrvTableMetadataAllocator { Ok(TableMetadata { table_id, - region_routes, + table_route, region_wal_options, }) } } - -/// pre-allocates create table's table id and region routes. -async fn handle_create_region_routes( - cluster_id: u64, - task: &CreateTableTask, - ctx: &SelectorContext, - selector: &SelectorRef, - table_id_sequence: &SequenceRef, -) -> Result<(TableId, Vec<RegionRoute>)> { - let table_info = &task.table_info; - let partitions = &task.partitions; - - let mut peers = selector - .select( - cluster_id, - ctx, - SelectorOptions { - min_required_items: partitions.len(), - allow_duplication: true, - }, - ) - .await?; - - if peers.len() < partitions.len() { - warn!( - "Create table failed due to no enough available datanodes, table: {}, partition number: {}, datanode number: {}", - format_full_table_name( - &table_info.catalog_name, - &table_info.schema_name, - &table_info.name - ), - partitions.len(), - peers.len() - ); - return error::NoEnoughAvailableDatanodeSnafu { - required: partitions.len(), - available: peers.len(), - } - .fail(); - } - - // We don't need to keep all peers, just truncate it to the number of partitions. - // If the peers are not enough, some peers will be used for multiple partitions. - peers.truncate(partitions.len()); - - let table_id = table_id_sequence - .next() - .await - .context(error::NextSequenceSnafu)?
as u32; - - ensure!( - partitions.len() <= MAX_REGION_SEQ as usize, - TooManyPartitionsSnafu - ); - - let region_routes = partitions - .iter() - .enumerate() - .map(|(i, partition)| { - let region = Region { - id: RegionId::new(table_id, i as u32), - partition: Some(partition.clone().into()), - ..Default::default() - }; - let peer = peers[i % peers.len()].clone(); - RegionRoute { - region, - leader_peer: Some(peer.into()), - follower_peers: vec![], // follower_peers is not supported at the moment - leader_status: None, - } - }) - .collect::<Vec<_>>(); - - Ok((table_id, region_routes)) -} diff --git a/src/meta-srv/src/test_util.rs b/src/meta-srv/src/test_util.rs index 801b63ab3222..3013ac9ad745 100644 --- a/src/meta-srv/src/test_util.rs +++ b/src/meta-srv/src/test_util.rs @@ -17,6 +17,7 @@ use std::sync::Arc; use chrono::DateTime; use common_catalog::consts::{DEFAULT_CATALOG_NAME, DEFAULT_SCHEMA_NAME, MITO_ENGINE}; +use common_meta::key::table_route::TableRouteValue; use common_meta::key::{TableMetadataManager, TableMetadataManagerRef}; use common_meta::kv_backend::memory::MemoryKvBackend; use common_meta::peer::Peer; @@ -145,7 +146,11 @@ pub(crate) async fn prepare_table_region_and_info_value( region_route_factory(4, 3), ]; table_metadata_manager - .create_table_metadata(table_info, region_routes, HashMap::default()) + .create_table_metadata( + table_info, + TableRouteValue::physical(region_routes), + HashMap::default(), + ) .await .unwrap(); } diff --git a/src/metric-engine/Cargo.toml b/src/metric-engine/Cargo.toml index 2d820b9a2787..def5885cf908 100644 --- a/src/metric-engine/Cargo.toml +++ b/src/metric-engine/Cargo.toml @@ -5,7 +5,6 @@ edition.workspace = true license.workspace = true [dependencies] -ahash.workspace = true api.workspace = true aquamarine.workspace = true async-trait.workspace = true @@ -20,6 +19,7 @@ datafusion.workspace = true datatypes.workspace = true lazy_static = "1.4" mito2.workspace = true +mur3 = "0.1" object-store.workspace = true prometheus.workspace = true serde_json.workspace = true diff --git a/src/metric-engine/src/engine.rs b/src/metric-engine/src/engine.rs index 70038f528b60..5e610c3e91a3 100644 --- a/src/metric-engine/src/engine.rs +++ b/src/metric-engine/src/engine.rs @@ -40,9 +40,6 @@ use self::state::MetricEngineState; use crate::data_region::DataRegion; use crate::metadata_region::MetadataRegion; -/// Fixed random state for generating tsid -pub(crate) const RANDOM_STATE: ahash::RandomState = ahash::RandomState::with_seeds(1, 2, 3, 4); - #[cfg_attr(doc, aquamarine::aquamarine)] /// # Metric Engine /// diff --git a/src/metric-engine/src/engine/put.rs b/src/metric-engine/src/engine/put.rs index a0f187faaa7a..19f40a509975 100644 --- a/src/metric-engine/src/engine/put.rs +++ b/src/metric-engine/src/engine/put.rs @@ -14,7 +14,6 @@ use std::hash::{BuildHasher, Hash, Hasher}; -use ahash::RandomState; use api::v1::value::ValueData; use api::v1::{ColumnDataType, ColumnSchema, Row, Rows, SemanticType}; use common_telemetry::{error, info}; @@ -25,13 +24,16 @@ use store_api::metric_engine_consts::{ use store_api::region_request::{AffectedRows, RegionPutRequest}; use store_api::storage::{RegionId, TableId}; -use crate::engine::{MetricEngineInner, RANDOM_STATE}; +use crate::engine::MetricEngineInner; use crate::error::{ ColumnNotFoundSnafu, ForbiddenPhysicalAlterSnafu, LogicalRegionNotFoundSnafu, Result, }; use crate::metrics::FORBIDDEN_OPERATION_COUNT; use crate::utils::{to_data_region_id, to_metadata_region_id}; +// A random number +const TSID_HASH_SEED: u32 =
846793005; + impl MetricEngineInner { /// Dispatch region put request pub async fn put_region( @@ -174,9 +176,8 @@ impl MetricEngineInner { }); // fill internal columns - let mut random_state = RANDOM_STATE.clone(); for row in &mut rows.rows { - Self::fill_internal_columns(&mut random_state, table_id, &tag_col_indices, row); + Self::fill_internal_columns(table_id, &tag_col_indices, row); } Ok(()) @@ -184,12 +185,11 @@ impl MetricEngineInner { /// Fills internal columns of a row with table name and a hash of tag values. fn fill_internal_columns( - random_state: &mut RandomState, table_id: TableId, tag_col_indices: &[(usize, String)], row: &mut Row, ) { - let mut hasher = random_state.build_hasher(); + let mut hasher = mur3::Hasher128::with_seed(TSID_HASH_SEED); for (idx, name) in tag_col_indices { let tag = row.values[*idx].clone(); name.hash(&mut hasher); @@ -198,7 +198,8 @@ impl MetricEngineInner { string.hash(&mut hasher); } } - let hash = hasher.finish(); + // TSID is 64 bits, simply truncate the 128-bit hash + let (hash, _) = hasher.finish128(); // fill table id and tsid row.values.push(ValueData::U32Value(table_id).into()); @@ -247,15 +248,15 @@ mod tests { .unwrap(); let batches = RecordBatches::try_collect(stream).await.unwrap(); let expected = "\ -+-------------------------+----------------+------------+---------------------+-------+ -| greptime_timestamp | greptime_value | __table_id | __tsid | job | -+-------------------------+----------------+------------+---------------------+-------+ -| 1970-01-01T00:00:00 | 0.0 | 3 | 4844750677434873907 | tag_0 | -| 1970-01-01T00:00:00.001 | 1.0 | 3 | 4844750677434873907 | tag_0 | -| 1970-01-01T00:00:00.002 | 2.0 | 3 | 4844750677434873907 | tag_0 | -| 1970-01-01T00:00:00.003 | 3.0 | 3 | 4844750677434873907 | tag_0 | -| 1970-01-01T00:00:00.004 | 4.0 | 3 | 4844750677434873907 | tag_0 | -+-------------------------+----------------+------------+---------------------+-------+"; ++-------------------------+----------------+------------+----------------------+-------+ +| greptime_timestamp | greptime_value | __table_id | __tsid | job | ++-------------------------+----------------+------------+----------------------+-------+ +| 1970-01-01T00:00:00 | 0.0 | 3 | 12881218023286672757 | tag_0 | +| 1970-01-01T00:00:00.001 | 1.0 | 3 | 12881218023286672757 | tag_0 | +| 1970-01-01T00:00:00.002 | 2.0 | 3 | 12881218023286672757 | tag_0 | +| 1970-01-01T00:00:00.003 | 3.0 | 3 | 12881218023286672757 | tag_0 | +| 1970-01-01T00:00:00.004 | 4.0 | 3 | 12881218023286672757 | tag_0 | ++-------------------------+----------------+------------+----------------------+-------+"; assert_eq!(expected, batches.pretty_print().unwrap(), "physical region"); // read data from logical region diff --git a/src/mito2/src/error.rs b/src/mito2/src/error.rs index b1d48f8c654e..39457281d76b 100644 --- a/src/mito2/src/error.rs +++ b/src/mito2/src/error.rs @@ -416,6 +416,13 @@ pub enum Error { error: ArrowError, location: Location, }, + + #[snafu(display("Invalid file metadata"))] + ConvertMetaData { + location: Location, + #[snafu(source)] + error: parquet::errors::ParquetError, + }, } pub type Result<T> = std::result::Result<T, Error>; @@ -477,6 +484,7 @@ impl ErrorExt for Error { InvalidBatch { .. } => StatusCode::InvalidArguments, InvalidRecordBatch { .. } => StatusCode::InvalidArguments, ConvertVector { source, .. } => source.status_code(), + ConvertMetaData { .. } => StatusCode::Internal, ComputeArrow { .. } => StatusCode::Internal, ComputeVector { ..
} => StatusCode::Internal, PrimaryKeyLengthMismatch { .. } => StatusCode::InvalidArguments, diff --git a/src/mito2/src/region/opener.rs b/src/mito2/src/region/opener.rs index ffb3696a97e5..7b969d578d00 100644 --- a/src/mito2/src/region/opener.rs +++ b/src/mito2/src/region/opener.rs @@ -256,6 +256,10 @@ impl RegionOpener { let flushed_entry_id = version.flushed_entry_id; let version_control = Arc::new(VersionControl::new(version)); if !self.skip_wal_replay { + info!( + "Start replaying memtable at flushed_entry_id {} for region {}", + flushed_entry_id, region_id + ); replay_memtable( wal, &wal_options, diff --git a/src/mito2/src/sst/parquet.rs b/src/mito2/src/sst/parquet.rs index af3f8479f39c..584faf1ab964 100644 --- a/src/mito2/src/sst/parquet.rs +++ b/src/mito2/src/sst/parquet.rs @@ -15,6 +15,7 @@ //! SST in parquet format. mod format; +mod helper; mod page_reader; pub mod reader; pub mod row_group; @@ -22,6 +23,7 @@ mod stats; pub mod writer; use common_base::readable_size::ReadableSize; +use parquet::file::metadata::ParquetMetaData; use crate::sst::file::FileTimeRange; @@ -59,6 +61,8 @@ pub struct SstInfo { pub file_size: u64, /// Number of rows. pub num_rows: usize, + /// File Meta Data + pub file_metadata: Option<ParquetMetaData>, } #[cfg(test)] @@ -195,4 +199,68 @@ mod tests { }; assert!(cache.as_ref().unwrap().get_pages(&page_key).is_none()); } + + #[tokio::test] + async fn test_parquet_metadata_eq() { + // create test env + let mut env = crate::test_util::TestEnv::new(); + let object_store = env.init_object_store_manager(); + let handle = sst_file_handle(0, 1000); + let file_path = handle.file_path(FILE_DIR); + let metadata = Arc::new(sst_region_metadata()); + let source = new_source(&[ + new_batch_by_range(&["a", "d"], 0, 60), + new_batch_by_range(&["b", "f"], 0, 40), + new_batch_by_range(&["b", "h"], 100, 200), + ]); + let write_opts = WriteOptions { + row_group_size: 50, + ..Default::default() + }; + + // write the sst file and get sst info + // sst info contains the parquet metadata, which is converted from FileMetaData + let mut writer = + ParquetWriter::new(file_path, metadata.clone(), source, object_store.clone()); + let sst_info = writer + .write_all(&write_opts) + .await + .unwrap() + .expect("write_all should return sst info"); + let writer_metadata = sst_info.file_metadata.unwrap(); + + // read the sst file metadata + let builder = ParquetReaderBuilder::new(FILE_DIR.to_string(), handle.clone(), object_store); + let reader = builder.build().await.unwrap(); + let reader_metadata = reader.parquet_metadata(); + + // Because ParquetMetaData doesn't implement PartialEq, + // check all fields manually + macro_rules! assert_metadata { + ( $writer:expr, $reader:expr, $($method:ident,)+ ) => { + $( + assert_eq!($writer.$method(), $reader.$method()); + )+ + } + } + + assert_metadata!( + writer_metadata.file_metadata(), + reader_metadata.file_metadata(), + version, + num_rows, + created_by, + key_value_metadata, + schema_descr, + column_orders, + ); + + assert_metadata!( + writer_metadata, + reader_metadata, + row_groups, + column_index, + offset_index, + ); + } } diff --git a/src/mito2/src/sst/parquet/helper.rs b/src/mito2/src/sst/parquet/helper.rs new file mode 100644 index 000000000000..6e059bd963e5 --- /dev/null +++ b/src/mito2/src/sst/parquet/helper.rs @@ -0,0 +1,86 @@ +// Copyright 2023 Greptime Team +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use std::sync::Arc; + +use parquet::basic::ColumnOrder; +use parquet::file::metadata::{FileMetaData, ParquetMetaData, RowGroupMetaData}; +use parquet::format; +use parquet::schema::types::{from_thrift, SchemaDescriptor}; +use snafu::ResultExt; + +use crate::error; +use crate::error::Result; + +// Refer to https://github.com/apache/arrow-rs/blob/7e134f4d277c0b62c27529fc15a4739de3ad0afd/parquet/src/file/footer.rs#L74-L90 +/// Convert [format::FileMetaData] to [ParquetMetaData] +pub fn parse_parquet_metadata(t_file_metadata: format::FileMetaData) -> Result<ParquetMetaData> { + let schema = from_thrift(&t_file_metadata.schema).context(error::ConvertMetaDataSnafu)?; + let schema_desc_ptr = Arc::new(SchemaDescriptor::new(schema)); + + let mut row_groups = Vec::with_capacity(t_file_metadata.row_groups.len()); + for rg in t_file_metadata.row_groups { + row_groups.push( + RowGroupMetaData::from_thrift(schema_desc_ptr.clone(), rg) + .context(error::ConvertMetaDataSnafu)?, + ); + } + let column_orders = parse_column_orders(t_file_metadata.column_orders, &schema_desc_ptr); + + let file_metadata = FileMetaData::new( + t_file_metadata.version, + t_file_metadata.num_rows, + t_file_metadata.created_by, + t_file_metadata.key_value_metadata, + schema_desc_ptr, + column_orders, + ); + // There may be a problem due to the lack of column_index and offset_index, + // if we enable the page index in the future. + Ok(ParquetMetaData::new(file_metadata, row_groups)) +} + +// Port from https://github.com/apache/arrow-rs/blob/7e134f4d277c0b62c27529fc15a4739de3ad0afd/parquet/src/file/footer.rs#L106-L137 +/// Parses column orders from Thrift definition. +/// If no column orders are defined, returns `None`.
+fn parse_column_orders( + t_column_orders: Option<Vec<format::ColumnOrder>>, + schema_descr: &SchemaDescriptor, +) -> Option<Vec<ColumnOrder>> { + match t_column_orders { + Some(orders) => { + // Should always be the case + assert_eq!( + orders.len(), + schema_descr.num_columns(), + "Column order length mismatch" + ); + let mut res = Vec::with_capacity(schema_descr.num_columns()); + for (i, column) in schema_descr.columns().iter().enumerate() { + match orders[i] { + format::ColumnOrder::TYPEORDER(_) => { + let sort_order = ColumnOrder::get_sort_order( + column.logical_type(), + column.converted_type(), + column.physical_type(), + ); + res.push(ColumnOrder::TYPE_DEFINED_ORDER(sort_order)); + } + } + } + Some(res) + } + None => None, + } +} diff --git a/src/mito2/src/sst/parquet/reader.rs b/src/mito2/src/sst/parquet/reader.rs index 0882ef82c7e3..60729c664283 100644 --- a/src/mito2/src/sst/parquet/reader.rs +++ b/src/mito2/src/sst/parquet/reader.rs @@ -452,4 +452,9 @@ impl ParquetReader { Ok(None) } + + #[cfg(test)] + pub fn parquet_metadata(&self) -> Arc<ParquetMetaData> { + self.reader_builder.parquet_meta.clone() + } } diff --git a/src/mito2/src/sst/parquet/writer.rs b/src/mito2/src/sst/parquet/writer.rs index d776b3ac627d..febec27c0d36 100644 --- a/src/mito2/src/sst/parquet/writer.rs +++ b/src/mito2/src/sst/parquet/writer.rs @@ -26,6 +26,7 @@ use snafu::ResultExt; use store_api::metadata::RegionMetadataRef; use store_api::storage::consts::SEQUENCE_COLUMN_NAME; +use super::helper::parse_parquet_metadata; use crate::error::{InvalidMetadataSnafu, Result, WriteBufferSnafu}; use crate::read::{Batch, Source}; use crate::sst::parquet::format::WriteFormat; @@ -107,15 +108,20 @@ impl ParquetWriter { return Ok(None); } - let (_file_meta, file_size) = buffered_writer.close().await.context(WriteBufferSnafu)?; + let (file_meta, file_size) = buffered_writer.close().await.context(WriteBufferSnafu)?; + // Safety: num rows > 0 so we must have min/max. let time_range = stats.time_range.unwrap(); + // convert FileMetaData to ParquetMetaData + let parquet_metadata = parse_parquet_metadata(file_meta)?; + // object_store.write will make sure all bytes are written or an error is raised. Ok(Some(SstInfo { time_range, file_size, num_rows: stats.num_rows, + file_metadata: Some(parquet_metadata), })) } diff --git a/src/operator/src/error.rs b/src/operator/src/error.rs index e96f1aaa21fe..52956e8055f9 100644 --- a/src/operator/src/error.rs +++ b/src/operator/src/error.rs @@ -483,6 +483,12 @@ pub enum Error { location: Location, source: query::error::Error, }, + + #[snafu(display("Invalid table name: {}", table_name))] + InvalidTableName { + table_name: String, + location: Location, + }, } pub type Result<T> = std::result::Result<T, Error>; @@ -507,7 +513,8 @@ impl ErrorExt for Error { | Error::InvalidPartitionColumns { .. } | Error::PrepareFileTable { .. } | Error::InferFileTableSchema { .. } - | Error::SchemaIncompatible { .. } => StatusCode::InvalidArguments, + | Error::SchemaIncompatible { .. } + | Error::InvalidTableName { .. } => StatusCode::InvalidArguments, Error::TableAlreadyExists { ..
} => StatusCode::TableAlreadyExists, diff --git a/src/operator/src/statement/ddl.rs b/src/operator/src/statement/ddl.rs index 620e3de6445d..43fdf23a4f5b 100644 --- a/src/operator/src/statement/ddl.rs +++ b/src/operator/src/statement/ddl.rs @@ -50,8 +50,8 @@ use table::TableRef; use super::StatementExecutor; use crate::error::{ self, AlterExprToRequestSnafu, CatalogSnafu, ColumnDataTypeSnafu, ColumnNotFoundSnafu, - DeserializePartitionSnafu, InvalidPartitionColumnsSnafu, ParseSqlSnafu, Result, - SchemaNotFoundSnafu, TableMetadataManagerSnafu, TableNotFoundSnafu, + DeserializePartitionSnafu, InvalidPartitionColumnsSnafu, InvalidTableNameSnafu, ParseSqlSnafu, + Result, SchemaNotFoundSnafu, TableMetadataManagerSnafu, TableNotFoundSnafu, UnrecognizedTableOptionSnafu, }; use crate::expr_factory; @@ -131,8 +131,8 @@ impl StatementExecutor { ensure!( NAME_PATTERN_REG.is_match(&create_table.table_name), - error::UnexpectedSnafu { - violated: format!("Invalid table name: {}", create_table.table_name) + InvalidTableNameSnafu { + table_name: create_table.table_name.clone(), } ); diff --git a/src/operator/src/tests/partition_manager.rs b/src/operator/src/tests/partition_manager.rs index c0d2a9f74f6b..dd2a044b51c3 100644 --- a/src/operator/src/tests/partition_manager.rs +++ b/src/operator/src/tests/partition_manager.rs @@ -17,6 +17,7 @@ use std::sync::atomic::{AtomicU32, Ordering}; use std::sync::Arc; use catalog::kvbackend::MetaKvBackend; +use common_meta::key::table_route::TableRouteValue; use common_meta::key::TableMetadataManager; use common_meta::kv_backend::memory::MemoryKvBackend; use common_meta::kv_backend::KvBackendRef; @@ -114,7 +115,7 @@ pub(crate) async fn create_partition_rule_manager( table_metadata_manager .create_table_metadata( new_test_table_info(1, "table_1", regions.clone().into_iter()).into(), - vec![ + TableRouteValue::physical(vec![ RegionRoute { region: Region { id: 3.into(), @@ -169,7 +170,7 @@ pub(crate) async fn create_partition_rule_manager( follower_peers: vec![], leader_status: None, }, - ], + ]), region_wal_options.clone(), ) .await @@ -178,7 +179,7 @@ pub(crate) async fn create_partition_rule_manager( table_metadata_manager .create_table_metadata( new_test_table_info(2, "table_2", regions.clone().into_iter()).into(), - vec![ + TableRouteValue::physical(vec![ RegionRoute { region: Region { id: 1.into(), @@ -239,7 +240,7 @@ pub(crate) async fn create_partition_rule_manager( follower_peers: vec![], leader_status: None, }, - ], + ]), region_wal_options, ) .await diff --git a/src/servers/Cargo.toml b/src/servers/Cargo.toml index e81dfaf593c8..727f4817e798 100644 --- a/src/servers/Cargo.toml +++ b/src/servers/Cargo.toml @@ -58,7 +58,7 @@ openmetrics-parser = "0.4" opensrv-mysql = { git = "https://github.com/MichaelScofield/opensrv.git", rev = "1676c1d" } opentelemetry-proto.workspace = true parking_lot = "0.12" -pgwire = "0.17" +pgwire = "0.18" pin-project = "1.0" postgres-types = { version = "0.2", features = ["with-chrono-0_4"] } pprof = { version = "0.13", features = [ diff --git a/src/servers/src/error.rs b/src/servers/src/error.rs index d73fcbe91397..70b4401c9a73 100644 --- a/src/servers/src/error.rs +++ b/src/servers/src/error.rs @@ -24,7 +24,7 @@ use catalog; use common_error::ext::{BoxedError, ErrorExt}; use common_error::status_code::StatusCode; use common_macro::stack_trace_debug; -use common_telemetry::logging; +use common_telemetry::{debug, error}; use datatypes::prelude::ConcreteDataType; use query::parser::PromQuery; use serde_json::json; @@ -620,7 +620,11 @@ 
impl IntoResponse for Error { | Error::InvalidQuery { .. } | Error::TimePrecision { .. } => HttpStatusCode::BAD_REQUEST, _ => { - logging::error!(self; "Failed to handle HTTP request"); + if self.status_code().should_log_error() { + error!(self; "Failed to handle HTTP request: "); + } else { + debug!("Failed to handle HTTP request: {self}"); + } HttpStatusCode::INTERNAL_SERVER_ERROR } diff --git a/src/servers/src/postgres.rs b/src/servers/src/postgres.rs index 72039b7df722..0836ea51bb21 100644 --- a/src/servers/src/postgres.rs +++ b/src/servers/src/postgres.rs @@ -31,7 +31,6 @@ use std::sync::Arc; use ::auth::UserProviderRef; use derive_builder::Builder; use pgwire::api::auth::ServerParameterProvider; -use pgwire::api::store::MemPortalStore; use pgwire::api::ClientInfo; pub use server::PostgresServer; use session::context::Channel; @@ -40,7 +39,6 @@ use session::Session; use self::auth_handler::PgLoginVerifier; use self::handler::DefaultQueryParser; use crate::query_handler::sql::ServerSqlQueryHandlerRef; -use crate::SqlPlan; pub(crate) struct GreptimeDBStartupParameters { version: &'static str, @@ -76,7 +74,6 @@ pub struct PostgresServerHandler { param_provider: Arc<GreptimeDBStartupParameters>, session: Arc<Session>, - portal_store: Arc<MemPortalStore<SqlPlan>>, query_parser: Arc<DefaultQueryParser>, } @@ -99,7 +96,6 @@ impl MakePostgresServerHandler { param_provider: self.param_provider.clone(), session: session.clone(), - portal_store: Arc::new(MemPortalStore::new()), query_parser: Arc::new(DefaultQueryParser::new(self.query_handler.clone(), session)), } } diff --git a/src/servers/src/postgres/handler.rs b/src/servers/src/postgres/handler.rs index edb4b2f593e0..5356057238a8 100644 --- a/src/servers/src/postgres/handler.rs +++ b/src/servers/src/postgres/handler.rs @@ -25,7 +25,6 @@ use pgwire::api::portal::{Format, Portal}; use pgwire::api::query::{ExtendedQueryHandler, SimpleQueryHandler, StatementOrPortal}; use pgwire::api::results::{DataRowEncoder, DescribeResponse, QueryResponse, Response, Tag}; use pgwire::api::stmt::QueryParser; -use pgwire::api::store::MemPortalStore; use pgwire::api::{ClientInfo, Type}; use pgwire::error::{ErrorInfo, PgWireError, PgWireResult}; use query::query_engine::DescribeResult; @@ -192,11 +191,6 @@ impl QueryParser for DefaultQueryParser { impl ExtendedQueryHandler for PostgresServerHandler { type Statement = SqlPlan; type QueryParser = DefaultQueryParser; - type PortalStore = MemPortalStore<Self::Statement>; - - fn portal_store(&self) -> Arc<Self::PortalStore> { - self.portal_store.clone() - } fn query_parser(&self) -> Arc<Self::QueryParser> { self.query_parser.clone() diff --git a/src/store-api/src/logstore.rs b/src/store-api/src/logstore.rs index 98d7c00ab366..16809c26b1a1 100644 --- a/src/store-api/src/logstore.rs +++ b/src/store-api/src/logstore.rs @@ -49,7 +49,6 @@ pub trait LogStore: Send + Sync + 'static + std::fmt::Debug { /// Creates a new `EntryStream` to asynchronously generates `Entry` with ids /// starting from `id`. - // TODO(niebayes): update docs for entry id.
async fn read( &self, ns: &Self::Namespace, diff --git a/tests-integration/tests/http.rs b/tests-integration/tests/http.rs index 5a8f2aa4aaf7..9341ba5f09ce 100644 --- a/tests-integration/tests/http.rs +++ b/tests-integration/tests/http.rs @@ -747,6 +747,7 @@ enable = true [frontend.logging] enable_otlp_tracing = false +append_stdout = true [frontend.datanode.client] timeout = "10s" @@ -815,6 +816,7 @@ parallel_scan_channel_size = 32 [datanode.logging] enable_otlp_tracing = false +append_stdout = true [datanode.export_metrics] enable = false @@ -824,7 +826,11 @@ write_interval = "30s" [datanode.export_metrics.headers] [logging] -enable_otlp_tracing = false"#, +enable_otlp_tracing = false +append_stdout = true + +[wal_meta] +provider = "raft_engine""#, store_type, ); let body_text = drop_lines_with_inconsistent_results(res_get.text().await); diff --git a/tests/cases/standalone/common/create/create.result b/tests/cases/standalone/common/create/create.result index 08e4b658de2b..436cbfb393db 100644 --- a/tests/cases/standalone/common/create/create.result +++ b/tests/cases/standalone/common/create/create.result @@ -52,7 +52,7 @@ Error: 4000(TableAlreadyExists), Table already exists: `greptime.public.test2` CREATE TABLE 'N.~' (i TIMESTAMP TIME INDEX); -Error: 1002(Unexpected), Unexpected, violated: Invalid table name: N.~ +Error: 1004(InvalidArguments), Invalid table name: N.~ DESC TABLE integers; diff --git a/tests/runner/src/env.rs b/tests/runner/src/env.rs index b5218979821a..1bd7ad36496a 100644 --- a/tests/runner/src/env.rs +++ b/tests/runner/src/env.rs @@ -190,7 +190,7 @@ impl Env { "start".to_string(), "-c".to_string(), self.generate_config_file(subcommand, db_ctx), - "--http-addr=127.0.0.1:5001".to_string(), + "--http-addr=127.0.0.1:5002".to_string(), ]; (args, SERVER_ADDR.to_string()) } @@ -213,7 +213,7 @@ impl Env { "true".to_string(), "--enable-region-failover".to_string(), "false".to_string(), - "--http-addr=127.0.0.1:5001".to_string(), + "--http-addr=127.0.0.1:5002".to_string(), ]; (args, METASRV_ADDR.to_string()) }